I'm getting an error - "bad allocation" while working with the UHD library.
I'm trying to compile some basic code to learn more about the UHD library. The program compiles, but when I run it I get an error.
Code:
#include <uhd/utils/thread_priority.hpp>
#include <uhd/utils/safe_main.hpp>
#include <uhd/usrp/multi_usrp.hpp>
#include <uhd/exception.hpp>
#include <uhd/types/tune_request.hpp>
#include <boost/program_options.hpp>
#include <boost/format.hpp>
#include <boost/thread.hpp>
#include <iostream>

int UHD_SAFE_MAIN(int argc, char *argv[]) {
    uhd::set_thread_priority_safe();

    std::string device_args("addr=192.168.10.2");
    std::string subdev("A:0");
    std::string ant("TX/RX");
    std::string ref("internal");

    double rate(1e6);
    double freq(915e6);
    double gain(10);
    double bw(1e6);

    //create a usrp device
    std::cout << std::endl;
    std::cout << boost::format("Creating the usrp device with: %s...") % device_args << std::endl;
    uhd::usrp::multi_usrp::sptr usrp = uhd::usrp::multi_usrp::make(device_args);

    // Lock mboard clocks
    std::cout << boost::format("Lock mboard clocks: %s") % ref << std::endl;
    usrp->set_clock_source(ref);

    //always select the subdevice first, the channel mapping affects the other settings
    std::cout << boost::format("subdev set to: %s") % subdev << std::endl;
    usrp->set_rx_subdev_spec(subdev);
    std::cout << boost::format("Using Device: %s") % usrp->get_pp_string() << std::endl;

    //set the sample rate
    if (rate <= 0.0) {
        std::cerr << "Please specify a valid sample rate" << std::endl;
        return ~0;
    }

    // set sample rate
    std::cout << boost::format("Setting RX Rate: %f Msps...") % (rate / 1e6) << std::endl;
    usrp->set_rx_rate(rate);
    std::cout << boost::format("Actual RX Rate: %f Msps...") % (usrp->get_rx_rate() / 1e6) << std::endl << std::endl;

    // set freq
    std::cout << boost::format("Setting RX Freq: %f MHz...") % (freq / 1e6) << std::endl;
    uhd::tune_request_t tune_request(freq);
    usrp->set_rx_freq(tune_request);
    std::cout << boost::format("Actual RX Freq: %f MHz...") % (usrp->get_rx_freq() / 1e6) << std::endl << std::endl;

    // set the rf gain
    std::cout << boost::format("Setting RX Gain: %f dB...") % gain << std::endl;
    usrp->set_rx_gain(gain);
    std::cout << boost::format("Actual RX Gain: %f dB...") % usrp->get_rx_gain() << std::endl << std::endl;

    // set the IF filter bandwidth
    std::cout << boost::format("Setting RX Bandwidth: %f MHz...") % (bw / 1e6) << std::endl;
    usrp->set_rx_bandwidth(bw);
    std::cout << boost::format("Actual RX Bandwidth: %f MHz...") % (usrp->get_rx_bandwidth() / 1e6) << std::endl << std::endl;

    // set the antenna
    std::cout << boost::format("Setting RX Antenna: %s") % ant << std::endl;
    usrp->set_rx_antenna(ant);
    std::cout << boost::format("Actual RX Antenna: %s") % usrp->get_rx_antenna() << std::endl << std::endl;

    return EXIT_SUCCESS;
}
Part of the code where the error occurs:
//create a usrp device
std::cout << std::endl;
std::cout << boost::format("Creating the usrp device with: %s...") %device_args << std::endl;
uhd::usrp::multi_usrp::sptr usrp = uhd::usrp::multi_usrp::make(device_args);
Error:
error: bad allocation
I'm using:
Microsoft Visual C++ Express 2010
C++ language
UHD library, Win32_VS2010.exe, 003.007.003-release
Boost library 1_63_0
I do not have any USRP device connected to my computer.
I don't know whether the error comes from the UHD library or from my C++ code. I tried compiling this program with different versions of Microsoft Visual Studio and different versions of the UHD library, including the latest one. I even tried compiling it on a different PC, but the result was similar: there was no error that interrupted the program, but I got the string "error: bad allocation" in the console and the program stopped working at the same spot.
When I first started compiling this program (with UHD_003.004.000-release) I didn't get the "bad allocation" error; I got an error that said "Error: LookupError: KeyError: No device found for ----->". After that I decided to upgrade my UHD library to the newer version (003.007.003), and then the "bad allocation" error started occurring. Installing the previous version again didn't help.
I tried changing the type of device_args from std::string to uhd::device_addr_t, as described in the manual at http://files.ettus.com/manual, but the error didn't disappear.
Any help would be appreciated.
"I do not connect any URSP device to my computer."
You cannot execute this code without having a USRP connected to the computer you are running it on.
when you call uhd::usrp::multi_usrp::make(device_args);
the uhd is trying to connected to a USRP with the IP address you have speciified in device args.
try connecting a usrp to your computer and try again
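If you want the program to fail with a clearer message when no hardware is reachable, you can probe for devices before calling make(). Here is a minimal sketch using uhd::device::find(); the address hint is just the one from your code and only serves as an example:

#include <uhd/device.hpp>
#include <uhd/types/device_addr.hpp>
#include <iostream>

int main() {
    // Hint restricting the search to the address used in the question (illustrative).
    uhd::device_addr_t hint("addr=192.168.10.2");

    // Ask UHD which devices matching the hint are reachable right now.
    uhd::device_addrs_t found = uhd::device::find(hint);

    if (found.empty()) {
        std::cerr << "No USRP found for hint: " << hint.to_string() << std::endl;
        return 1;
    }

    for (const uhd::device_addr_t &dev : found) {
        std::cout << "Found device:" << std::endl << dev.to_pp_string() << std::endl;
    }
    return 0;
}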
I want to create my own Overclocking Monitor for which I need to read information like the current voltage, clockspeeds and others.
In C++ I can easily get the information from nvidia-smi by running, for example:
console("nvidia-smi -q -i voltage");
Which then displays me:
==============NVSMI LOG==============
Timestamp : Tue Dec 13 17:55:54 2022
Driver Version : 526.47
CUDA Version : 12.0
Attached GPUs : 1
GPU 00000000:01:00.0
Voltage
Graphics : 806.250 mV
From that I need only the voltage number, in this case "806.25".
I've investigated <cctype> a bit, which was something I'd read about, but I'm not making any progress.
So how can I import only that number into my C++ program? I'd guess the process will be the same for the other commands.
I don't currently have an Nvidia GPU to test this (stuck with Intel integrated graphics), so I can't build against the CUDA headers, but feel free to test this and let me know if it works or not.
#include <cuda_runtime.h>

#include <chrono>
#include <ctime>
#include <iostream>
#include <string>

int main() {
    // Get the current timestamp (converted to time_t, since streaming a
    // chrono time_point directly requires C++20)
    auto current_time = std::chrono::system_clock::now();
    std::time_t timestamp = std::chrono::system_clock::to_time_t(current_time);

    // Get the installed driver version
    int driver_version = 0;
    cudaDriverGetVersion(&driver_version);

    // Get the CUDA runtime version
    int cuda_version = 0;
    cudaRuntimeGetVersion(&cuda_version);

    // Get the name of the attached GPU
    cudaDeviceProp device_properties;
    cudaGetDeviceProperties(&device_properties, 0);
    std::string gpu_name = device_properties.name;

    // Note: the CUDA runtime API has no call that reports power usage or
    // core voltage, so the voltage line from nvidia-smi cannot be read this
    // way; see the parsing sketch below for that value.

    // Output the overclocking data
    std::cout << "Timestamp: " << std::ctime(&timestamp);
    std::cout << "Driver version: " << driver_version << std::endl;
    std::cout << "CUDA version: " << cuda_version << std::endl;
    std::cout << "Attached GPU: " << gpu_name << std::endl;
    return 0;
}
If it works, the driver version, CUDA version, and GPU name can be read that way; the voltage itself is not available through the CUDA runtime API.
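For the voltage number itself, the practical route is to run nvidia-smi and parse its text output. Below is a minimal sketch under the assumption of a POSIX popen (on Windows the calls are _popen/_pclose); it runs the full query and pulls the Graphics entry out of the Voltage section, so no extra command-line flags are needed.

#include <cstdio>
#include <iostream>
#include <string>

// Run a command and capture everything it prints to stdout.
static std::string run_command(const std::string& cmd) {
    std::string output;
    FILE* pipe = popen(cmd.c_str(), "r"); // _popen on Windows
    if (!pipe) return output;
    char buffer[256];
    while (fgets(buffer, sizeof(buffer), pipe))
        output += buffer;
    pclose(pipe); // _pclose on Windows
    return output;
}

int main() {
    std::string text = run_command("nvidia-smi -q");

    // Locate the "Voltage" section, then its "Graphics : 806.250 mV" line.
    std::size_t pos = text.find("Voltage");
    if (pos != std::string::npos) pos = text.find("Graphics", pos);
    if (pos != std::string::npos) pos = text.find(':', pos);
    if (pos == std::string::npos) {
        std::cerr << "Voltage entry not found in nvidia-smi output\n";
        return 1;
    }

    // std::stod skips leading whitespace and stops parsing at " mV".
    double voltage_mv = std::stod(text.substr(pos + 1));
    std::cout << "Voltage: " << voltage_mv << " mV\n";
    return 0;
}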
I have a program that uses the modbus protocol to send chunks of data between a 64-bit Raspberry Pi 4 (running Raspberry Pi OS 64) and a receiving computer. My intended setup for the serial port is baud rate of 57600, 8 data bits, two stop bits, no flow control, and no parity. I have noticed that the data is only properly interpreted when the receiving computer is set to view one stop bit and no parity, regardless of the settings on the Raspberry Pi.
What is interesting is this program works as expected when run on Windows, only the Pi has caused problems at the moment. This was originally seen in ASIO 1.20 and can still be reproduced in 1.24 on the Pi.
I wrote a minimal example that reproduces the issue for me on the Pi:
#include <asio.hpp>
#include <asio/serial_port.hpp>
#include <array>
#include <iostream>
#include <string>

int main(void) {
    asio::io_service ioService;
    asio::serial_port serialPort(ioService, "/dev/serial0");

    serialPort.set_option(asio::serial_port_base::baud_rate(57600));
    serialPort.set_option(asio::serial_port_base::character_size(8));
    serialPort.set_option(asio::serial_port_base::stop_bits(asio::serial_port_base::stop_bits::two));
    serialPort.set_option(asio::serial_port_base::flow_control(asio::serial_port_base::flow_control::none));
    serialPort.set_option(asio::serial_port_base::parity(asio::serial_port_base::parity::none));

    std::string test("Test#");
    asio::write(serialPort, asio::buffer(test.data(), test.size()));

    std::array<char, 5> buf;
    asio::read(serialPort, asio::buffer(buf.data(), buf.size()));
    std::cout << "Received: " << std::string(std::begin(buf), std::end(buf)) << std::endl;

    serialPort.close();
    return 0;
}
I looked closer at the issue and used a Saleae Logic Analyzer to see what data is being sent between the machines. Below you can see the expected behavior for a successful run, this is when the test is run on Windows.
Here you can see the behavior that occurs on the Raspberry Pi when it runs the test code. The analyzer fails to interpret the data using the parameters set in the code.
Below you can see that when the analyzer is set with one stop bit rather than two, it interprets the hex without an issue.
Overall, the logic analyzer captures show that the issue is on the Pi's end. The program running on the Pi can interpret messages sent to it with the given parameters without any issue; however, when it replies to those messages, it seems the ASIO port settings are not being applied.
Any insight that can be provided would be very helpful. Let me know if you need more information. Thanks for the help!
UPDATE: Ran @sehe's test code as they recommended and the results are as follows:
baud_rate: Success
character_size: Success
stop_bits: Success
flow_control: Success
parity: Success
parity: 0 (Success)
flow_control: 0 (Success)
stop_bits: 0 (Success)
character_size: 8 (Success)
baud_rate: 57600 (Success)
ModbusTest: Main.cpp:37: int main(): Assertion `sb.value() == serial_port::stop_bits::two' failed.
It appears that the setting for stop bits did not successfully apply and rather failed silently. Any ideas on how to proceed with further debugging?
UPDATE 2: Also wanted to mention that I ran minicom with the same hardware setup and was able to communicate without issue using two stop bits.
Very solid debugging and analysis info.
I don't immediately see something wrong with the code. My intuition was to separate construction from open(), so the options could be set prior to opening, but it turns out that is just not working.
So maybe you can verify that the set_option calls had their desired effect. I can imagine hardware limitations that don't allow certain configuration?
This should definitely uncover any unexpected behavior:
Live On Coliru
//#undef NDEBUG
#include <boost/asio.hpp>
#include <boost/asio/serial_port.hpp>
namespace asio = boost::asio;
using asio::serial_port;
using boost::system::error_code;

#include <iostream>

int main() {
    asio::io_service ioService;
    asio::serial_port sp(ioService);
    sp.open("/dev/serial0");

    serial_port::baud_rate      br{57600};
    serial_port::character_size cs{8};
    serial_port::stop_bits      sb{serial_port::stop_bits::two};
    serial_port::flow_control   fc{serial_port::flow_control::none};
    serial_port::parity         pb{serial_port::parity::none};

    error_code ec;
    if (!ec) { sp.set_option(br, ec); std::cout << "baud_rate: " << ec.message() << std::endl; }
    if (!ec) { sp.set_option(cs, ec); std::cout << "character_size: " << ec.message() << std::endl; }
    if (!ec) { sp.set_option(sb, ec); std::cout << "stop_bits: " << ec.message() << std::endl; }
    if (!ec) { sp.set_option(fc, ec); std::cout << "flow_control: " << ec.message() << std::endl; }
    if (!ec) { sp.set_option(pb, ec); std::cout << "parity: " << ec.message() << std::endl; }

    sp.get_option(pb, ec); std::cout << "parity: " << pb.value() << " (" << ec.message() << ")" << std::endl;
    sp.get_option(fc, ec); std::cout << "flow_control: " << fc.value() << " (" << ec.message() << ")" << std::endl;
    sp.get_option(sb, ec); std::cout << "stop_bits: " << sb.value() << " (" << ec.message() << ")" << std::endl;
    sp.get_option(cs, ec); std::cout << "character_size: " << cs.value() << " (" << ec.message() << ")" << std::endl;
    sp.get_option(br, ec); std::cout << "baud_rate: " << br.value() << " (" << ec.message() << ")" << std::endl;

    assert(br.value() == 57600);
    assert(cs.value() == 8);
    assert(sb.value() == serial_port::stop_bits::two);
    assert(fc.value() == serial_port::flow_control::none);
    assert(pb.value() == serial_port::parity::none);

    std::string test("Test#");
    write(sp, asio::buffer(test));

    std::array<char, 5> buf;
    auto n = read(sp, asio::buffer(buf));
    std::cout << "Received: " << std::string(buf.data(), n) << std::endl;
}
Which on my system (Ubuntu host, using /dev/ttyS0) prints e.g.
baud_rate: Success
character_size: Success
stop_bits: Success
flow_control: Success
parity: Success
parity: 0 (Success)
flow_control: 0 (Success)
stop_bits: 2 (Success)
character_size: 8 (Success)
baud_rate: 57600 (Success)
As expected
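If the stop_bits assert fires only on the Pi, another low-level check is to bypass Asio and read the termios flags straight from the port's file descriptor. This is just a sketch, assuming a Linux/POSIX target; it sets two stop bits through Asio and then asks the tty driver whether CSTOPB actually stuck:

#include <boost/asio.hpp>
#include <boost/asio/serial_port.hpp>
#include <termios.h>
#include <cstdio>
#include <iostream>

namespace asio = boost::asio;

int main() {
    asio::io_service ioService;
    asio::serial_port sp(ioService, "/dev/serial0");
    sp.set_option(asio::serial_port::stop_bits(asio::serial_port::stop_bits::two));

    // Ask the tty driver directly which flags it ended up with.
    termios tio{};
    if (tcgetattr(sp.native_handle(), &tio) != 0) {
        std::perror("tcgetattr");
        return 1;
    }
    std::cout << "CSTOPB is "
              << ((tio.c_cflag & CSTOPB) ? "set (two stop bits)" : "clear (one stop bit)")
              << std::endl;
}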
I was able to discover the cause and fix the problem!
I am using a Raspberry Pi 4 for this project and interfacing with GPIO pins 14/15 to use /dev/serial0. With the default configuration, /dev/serial0 maps to /dev/ttyS0, which is a mini UART and is not capable of using multiple stop bits, etc.
Disabling Bluetooth sets the symlink to map to /dev/ttyAMA0, which is a full UART and is capable of parity and multiple stop bits.
In /boot/config.txt I added the following lines:
[all]
dtoverlay=disable-bt
If you are experiencing a similar problem with /dev/serial0, this may be worth a shot.
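To confirm which UART /dev/serial0 resolves to before and after the change, you can check the symlink target; here is a tiny C++17 sketch (the device names in the comment are the standard Raspberry Pi OS ones):

#include <filesystem>
#include <iostream>
#include <system_error>

int main() {
    // On Raspberry Pi OS, /dev/serial0 points at either /dev/ttyS0 (mini UART)
    // or /dev/ttyAMA0 (full PL011 UART).
    std::error_code ec;
    auto target = std::filesystem::read_symlink("/dev/serial0", ec);
    if (ec)
        std::cerr << "read_symlink failed: " << ec.message() << std::endl;
    else
        std::cout << "/dev/serial0 -> " << target << std::endl;
}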
I'm having trouble with my USRP b200 mini: I wasn't able to use it in transmitter mode. I work with two b100s and one b200 mini.
Until now, if I use one USRP b100 in transmitter mode and another one in receiver mode, everything works. If I use one USRP b100 in transmitter mode and my USRP b200 mini in receiver mode, everything still works. But if I do the opposite, I am not able to detect my transmitted signal anymore.
Could someone help me, please?
I use the following C++ code to configure my USRP:
void Radio_Tx_Rx::initialize(int TX) {
    printf("%s", KYEL);
    if (TX) {
        cout << "TRANSMITTER INITIALISATION " << endl;
        string usrp_addr("type=b200");
        usrp = uhd::usrp::multi_usrp::make(usrp_addr);
        usrp->set_tx_rate(fe);
        usrp->set_tx_freq(fc);
        usrp->set_tx_gain(20); // I tested gains from 0 to 80 with a step of 10
        usrp->set_tx_antenna("TX/RX");
        uhd::stream_args_t stream_args("fc32");
        tx_stream = usrp->get_tx_stream(stream_args);
        cout << " " << string(50, '-') << endl;
        usrp->issue_stream_cmd(uhd::stream_cmd_t::STREAM_MODE_START_CONTINUOUS);
    } else {
        cout << " RECEIVER INITIALISATION " << endl;
        string usrp_addr("type=b100");
        usrp = uhd::usrp::multi_usrp::make(usrp_addr);
        usrp->set_rx_rate(fe);
        usrp->set_rx_freq(fc);
        usrp->set_rx_antenna("TX/RX");
        uhd::stream_args_t stream_args("fc32");
        rx_stream = usrp->get_rx_stream(stream_args);
        cout << " " << string(50, '-') << endl;
        usrp->issue_stream_cmd(uhd::stream_cmd_t::STREAM_MODE_START_CONTINUOUS);
        printf("%s", KNRM);
    }
}
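For reference, on the transmit side samples are normally pushed explicitly through the tx_streamer with uhd::tx_metadata_t rather than started with a stream command. The following is only a sketch of that typical send pattern; the buffer contents, sizes, and loop count are placeholders, and the streamer is assumed to be the one created above.

#include <uhd/usrp/multi_usrp.hpp>
#include <complex>
#include <vector>

// Illustrative helper: push zero-valued samples through an existing TX streamer.
void send_test_burst(uhd::tx_streamer::sptr tx_stream) {
    std::vector<std::complex<float>> buff(1000); // replace with real signal samples

    uhd::tx_metadata_t md;
    md.start_of_burst = true;   // first packet of the burst
    md.end_of_burst   = false;
    md.has_time_spec  = false;  // transmit as soon as possible

    for (int i = 0; i < 100; ++i) {
        tx_stream->send(&buff.front(), buff.size(), md);
        md.start_of_burst = false; // later packets continue the same burst
    }

    // An empty packet with end_of_burst set tells the device the burst is done.
    md.end_of_burst = true;
    tx_stream->send("", 0, md);
}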
I've been trying to compile the code given on the following website to create a USRP object:
https://kb.ettus.com/Getting_Started_with_UHD_and_C%2B%2B
For the lazy, I'll just include the code:
#include <uhd/utils/thread_priority.hpp>
#include <uhd/utils/safe_main.hpp>
#include <uhd/usrp/multi_usrp.hpp>
#include <uhd/exception.hpp>
#include <uhd/types/tune_request.hpp>
#include <boost/program_options.hpp>
#include <boost/format.hpp>
#include <boost/thread.hpp>
#include <iostream>
int UHD_SAFE_MAIN(int argc, char *argv[]) {
    uhd::set_thread_priority_safe();

    std::string device_args("addr=192.168.10.2");
    std::string subdev("A:0");
    std::string ant("TX/RX");
    std::string ref("internal");

    double rate(1e6);
    double freq(915e6);
    double gain(10);
    double bw(1e6);

    //create a usrp device
    std::cout << std::endl;
    std::cout << boost::format("Creating the usrp device with: %s...") % device_args << std::endl;
    uhd::usrp::multi_usrp::sptr usrp = uhd::usrp::multi_usrp::make(device_args);

    // Lock mboard clocks
    std::cout << boost::format("Lock mboard clocks: %s") % ref << std::endl;
    usrp->set_clock_source(ref);

    //always select the subdevice first, the channel mapping affects the other settings
    std::cout << boost::format("subdev set to: %s") % subdev << std::endl;
    usrp->set_rx_subdev_spec(subdev);
    std::cout << boost::format("Using Device: %s") % usrp->get_pp_string() << std::endl;

    //set the sample rate
    if (rate <= 0.0) {
        std::cerr << "Please specify a valid sample rate" << std::endl;
        return ~0;
    }

    // set sample rate
    std::cout << boost::format("Setting RX Rate: %f Msps...") % (rate / 1e6) << std::endl;
    usrp->set_rx_rate(rate);
    std::cout << boost::format("Actual RX Rate: %f Msps...") % (usrp->get_rx_rate() / 1e6) << std::endl << std::endl;

    // set freq
    std::cout << boost::format("Setting RX Freq: %f MHz...") % (freq / 1e6) << std::endl;
    uhd::tune_request_t tune_request(freq);
    usrp->set_rx_freq(tune_request);
    std::cout << boost::format("Actual RX Freq: %f MHz...") % (usrp->get_rx_freq() / 1e6) << std::endl << std::endl;

    // set the rf gain
    std::cout << boost::format("Setting RX Gain: %f dB...") % gain << std::endl;
    usrp->set_rx_gain(gain);
    std::cout << boost::format("Actual RX Gain: %f dB...") % usrp->get_rx_gain() << std::endl << std::endl;

    // set the IF filter bandwidth
    std::cout << boost::format("Setting RX Bandwidth: %f MHz...") % (bw / 1e6) << std::endl;
    usrp->set_rx_bandwidth(bw);
    std::cout << boost::format("Actual RX Bandwidth: %f MHz...") % (usrp->get_rx_bandwidth() / 1e6) << std::endl << std::endl;

    // set the antenna
    std::cout << boost::format("Setting RX Antenna: %s") % ant << std::endl;
    usrp->set_rx_antenna(ant);
    std::cout << boost::format("Actual RX Antenna: %s") % usrp->get_rx_antenna() << std::endl << std::endl;

    return EXIT_SUCCESS;
}
At first I was using the GCC compiler via Code::Blocks, then decided to test gcc and g++ with MinGW from the command line; both attempts ended with these link-time errors:
main.cpp:17: undefined reference to `_imp___ZN3uhd24set_thread_priority_safeEfb'
main.cpp:32: undefined reference to `_imp___ZN3uhd13device_addr_tC1ERKSs'
main.cpp:32: undefined reference to `_imp___ZN3uhd4usrp10multi_usrp4makeERKNS_13device_addr_tE'
main.cpp:40: undefined reference to `_imp___ZN3uhd4usrp13subdev_spec_tC1ERKSs'
main.cpp:56: undefined reference to `_imp___ZN3uhd14tune_request_tC1Ed'
obj\Debug\main.o: In function `ZN3uhd4usrp10multi_usrp11set_rx_gainEdj':
multi_usrp.hpp:595: undefined reference to `_imp___ZN3uhd4usrp10multi_usrp9ALL_GAINSE'
obj\Debug\main.o: In function `ZN3uhd4usrp10multi_usrp11get_rx_gainEj':
multi_usrp.hpp:637: undefined reference to `_imp___ZN3uhd4usrp10multi_usrp9ALL_GAINSE'
collect2.exe: error: ld returned 1 exit status
I read about linking the library files to the project, but the package I downloaded doesn't seem to have any .lib, .a, or other library files. I downloaded it from their website, http://files.ettus.com/manual/page_install.html.
Any kind of help would be tremendously appreciated; I've been trying to figure out what the issue is for hours. As a note, I'm working on Windows 10.
Those are all simply linker errors saying that you have to add the linker libraries.
I don't know what exactly you've downloaded, but the Windows installers should come with the .lib and .dll files necessary for linking under Windows. Otherwise, if you've downloaded the source code, you'd have to follow the manual to build UHD on Windows and add the resulting library to your linker inputs.
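For example, assuming the installer put UHD under C:\Program Files\UHD (adjust both paths to wherever it actually landed on your machine), a command-line build with MSVC would look roughly like this; uhd.lib is the import library the installer ships:
cl /EHsc main.cpp /I"C:\Program Files\UHD\include" /link /LIBPATH:"C:\Program Files\UHD\lib" uhd.lib
In Code::Blocks the equivalent is adding the include directory, the lib directory, and the library itself to the project's build options.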
Are you passing the -luhd option on your command line when compiling?
If you installed UHD, most probably the headers are already on your include path, and adding that option will fix the problem.
So the g++ line should look like:
g++ my_file.cpp -o program_name -luhd
According to Microsoft, starting with Windows 10, applications using shared-mode WASAPI can request buffer sizes smaller than 10ms (see https://msdn.microsoft.com/en-us/library/windows/hardware/mt298187%28v=vs.85%29.aspx).
According to the article, achieving such low latencies requires some driver updates, which I did. Using an exclusive-mode render and capture stream, I measured a total round-trip latency (using a hardware loopback cable) of around 13ms. This suggests to me that at least one of the endpoints successfully achieves a latency of < 10ms. (Is this assumption correct?)
The article mentions that applications can use the new IAudioClient3 interface to query the minimum buffer size supported by the Windows audio engine using IAudioClient3::GetSharedModeEnginePeriod(). However, this function always returns 10ms on my system, and any attempt to initialize an audio stream using either IAudioClient::Initialize() or IAudioClient3::InitializeSharedAudioStream() with a period lower than 10ms always results in AUDCLNT_E_INVALID_DEVICE_PERIOD.
Just to be sure, I also disabled any effects processing in the audio drivers.
What am I missing? Is it even possible to get low latency from shared mode?
See below for some sample code.
#include <windows.h>
#include <atlbase.h>
#include <mmdeviceapi.h>
#include <audioclient.h>
#include <iostream>

#define VERIFY(hr) do { \
    auto temp = (hr); \
    if(FAILED(temp)) { \
        std::cout << "Error: " << #hr << ": " << temp << "\n"; \
        goto error; \
    } \
} while(0)

int main(int argc, char** argv) {
    HRESULT hr;
    CComPtr<IMMDevice> device;
    AudioClientProperties props;
    CComPtr<IAudioClient> client;
    CComPtr<IAudioClient2> client2;
    CComPtr<IAudioClient3> client3;
    CComHeapPtr<WAVEFORMATEX> format;
    CComPtr<IMMDeviceEnumerator> enumerator;
    REFERENCE_TIME minTime, maxTime, engineTime;
    UINT32 min, max, fundamental, default_, current;

    VERIFY(CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED));
    VERIFY(enumerator.CoCreateInstance(__uuidof(MMDeviceEnumerator)));
    VERIFY(enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia, &device));
    VERIFY(device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr, reinterpret_cast<void**>(&client)));
    VERIFY(client->QueryInterface(&client2));
    VERIFY(client->QueryInterface(&client3));
    VERIFY(client3->GetCurrentSharedModeEnginePeriod(&format, &current));

    // Always fails with AUDCLNT_E_OFFLOAD_MODE_ONLY.
    hr = client2->GetBufferSizeLimits(format, TRUE, &minTime, &maxTime);
    if(hr == AUDCLNT_E_OFFLOAD_MODE_ONLY)
        std::cout << "GetBufferSizeLimits returned AUDCLNT_E_OFFLOAD_MODE_ONLY.\n";
    else if(SUCCEEDED(hr))
        std::cout << "hw min = " << (minTime / 10000.0) << " hw max = " << (maxTime / 10000.0) << "\n";
    else
        VERIFY(hr);

    // Correctly? reports a minimum hardware period of 3ms and audio engine period of 10ms.
    VERIFY(client->GetDevicePeriod(&engineTime, &minTime));
    std::cout << "hw min = " << (minTime / 10000.0) << " engine = " << (engineTime / 10000.0) << "\n";

    // All values are set to a number of frames corresponding to 10ms.
    // This does not change if i change the device's sampling rate in the control panel.
    VERIFY(client3->GetSharedModeEnginePeriod(format, &default_, &fundamental, &min, &max));
    std::cout << "default = " << default_
              << " fundamental = " << fundamental
              << " min = " << min
              << " max = " << max
              << " current = " << current << "\n";

    props.bIsOffload = FALSE;
    props.cbSize = sizeof(props);
    props.eCategory = AudioCategory_ForegroundOnlyMedia;
    props.Options = AUDCLNT_STREAMOPTIONS_RAW | AUDCLNT_STREAMOPTIONS_MATCH_FORMAT;

    // Doesn't seem to have any effect regardless of category/options values.
    VERIFY(client2->SetClientProperties(&props));

    format.Free();
    VERIFY(client3->GetCurrentSharedModeEnginePeriod(&format, &current));
    VERIFY(client3->GetSharedModeEnginePeriod(format, &default_, &fundamental, &min, &max));
    std::cout << "default = " << default_
              << " fundamental = " << fundamental
              << " min = " << min
              << " max = " << max
              << " current = " << current << "\n";

error:
    CoUninitialize();
    return 0;
}
Per Hans in the comment above, double-check that you've followed the instructions for Low Latency Audio here.
I'd reboot the machine just to be sure; Windows can be a bit finicky with that kind of thing.
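Once GetSharedModeEnginePeriod starts reporting a minimum below 10ms, keep in mind that the small period only takes effect when the stream is created with IAudioClient3::InitializeSharedAudioStream rather than IAudioClient::Initialize. A minimal sketch of that call, assuming client3, format, and min were obtained exactly as in the question's code (error handling omitted):

// Assumes the same setup as above:
//   client3->GetCurrentSharedModeEnginePeriod(&format, &current);
//   client3->GetSharedModeEnginePeriod(format, &default_, &fundamental, &min, &max);
HRESULT hr = client3->InitializeSharedAudioStream(
    AUDCLNT_STREAMFLAGS_EVENTCALLBACK, // event-driven buffering
    min,                               // requested period in frames
    format,                            // current shared-mode mix format
    nullptr);                          // default audio session

if (hr == AUDCLNT_E_INVALID_DEVICE_PERIOD)
    std::cout << "Requested period rejected; the driver is still limited to the default period.\n";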