Hi, I am trying to use the T7 on an ARM SOC (the specific chip is a Samsung Exynos-5422). I wrote some simple test code to read the first 4 analogue inputs in streaming mode, and it worked just fine when I tested it on an x64 system running debian. However, the exact same code throws a STREAM_SETTLING_INVALID error when calling LJM_eStreamStart(). Here is the code in question:
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include "LabJackM.h"
// LJM configuration files
#define LJM_CONFIG_FILE "./labjack_ljm/LabJack/LJM/ljm_startup_configs.json"
#define LJM_CONSTS_FILE "./labjack_ljm/LabJack/LJM/ljm_constants.json"
#define NUM_CHANNELS 4 // Number of analog channels to be scanned
#define SCAN_RATE 20 // Scans per second
#define SCANS_PER_READ 1 // Number of scans per stream read
int execute;
// Ends program when SIGINT is received
void signal_handler()
{
execute = 0;
}
// Prints a description of an LJM error
// err_code: The LJM error code
// err_name: A description of where the error occurred
void handleError(LJM_ERROR_RETURN err_code, const char* err_name)
{
char err_str[LJM_MAX_NAME_SIZE];
if(err_code)
{
LJM_ErrorToString(err_code, err_str);
fprintf(stderr, "LJM ERROR IN %s: %s\n", err_name, err_str);
}
}
int main(int argc, char** argv)
{
int i;
LJM_ERROR_RETURN ljm_err; // Error code returned from LabJack functions
int ljm_handle; // Communication handle for the LabJack device
int ljm_dev_scan_backlog; // Number of scans left in device buffer
int ljm_scan_backlog; // Number of scans left in LJM library buffer
int ljm_address_types[NUM_CHANNELS]; // Address types of each channel to scan
int ljm_scanlist[NUM_CHANNELS]; // Array of channels to scan
// Device information return variables for LJM_ListAll()
int ljm_num_found = -1; // Number of devices found
int ljm_devices[LJM_LIST_ALL_SIZE]; // Device types
int ljm_connections[LJM_LIST_ALL_SIZE]; // Device connection types
int ljm_serialnums[LJM_LIST_ALL_SIZE]; // Device serial numbers
int ljm_ips[LJM_LIST_ALL_SIZE]; // Device IP addresses
double ljm_scanrate; // Number of scans per second
double ljm_data[NUM_CHANNELS]; // Data read from analog channels
char ljm_err_str[LJM_MAX_NAME_SIZE];
double val;
int int_val;
// Names of each channel to be scanned
const char* ljm_address_names[NUM_CHANNELS] = {"AIN0",
"AIN1",
"AIN2",
"AIN3"};
signal(SIGINT, signal_handler);
printf("Opening configuration files...\n");
ljm_err = LJM_LoadConfigurationFile(LJM_CONFIG_FILE);
handleError(ljm_err, "LJM_LoadConfigurationFile");
ljm_err = LJM_LoadConstantsFromFile(LJM_CONSTS_FILE);
handleError(ljm_err, "LJM_LoadConstantsFromFile");
printf("Getting information of all connected devices...\n");
ljm_err = LJM_ListAll(0, 0, &ljm_num_found, ljm_devices,
ljm_connections, ljm_serialnums, ljm_ips);
handleError(ljm_err, "LJM_ListAll");
printf("Found %d device(s)\n", ljm_num_found);
if(ljm_num_found)
{
printf("TYPE\tSERIAL\t\tIP\n");
for(i=0; i<ljm_num_found; i++)
{
printf("%d\t%d\t%d\n", ljm_devices[i], ljm_serialnums[i], ljm_ips[i]);
}
}
printf("Converting channel names to addresses...\n");
ljm_err = LJM_NamesToAddresses(NUM_CHANNELS, ljm_address_names,
ljm_scanlist, ljm_address_types);
handleError(ljm_err, "LJM_NamesToAddresses");
printf("Addresses: [ ");
for(i=0; i<NUM_CHANNELS; i++)
{
printf("%d ", ljm_scanlist[i]);
}
printf("]\n");
printf("Types: [ ");
for(i=0; i<NUM_CHANNELS; i++)
{
printf("%d ", ljm_address_types[i]);
}
printf("]\n");
printf("Opening device...\n");
ljm_err = LJM_Open(LJM_dtANY, LJM_ctUSB, "ANY", &ljm_handle);
handleError(ljm_err, "LJM_Open");
if(ljm_err)
{
exit(0);
}
printf("Setting device to non-blocking mode...\n");
ljm_err = LJM_WriteLibraryConfigS(LJM_STREAM_SCANS_RETURN,
LJM_STREAM_SCANS_RETURN_ALL_OR_NONE);
handleError(ljm_err, "LJM_WriteLibraryConfigS");
printf("Initializing stream...\n");
ljm_scanrate = SCAN_RATE;
ljm_err = LJM_eStreamStart(ljm_handle, SCANS_PER_READ, NUM_CHANNELS,
ljm_scanlist, &ljm_scanrate);
handleError(ljm_err, "LJM_eStreamStart");
printf("Requested scan rate: %d Actual scan rate: %f\n",
SCAN_RATE, ljm_scanrate);
ljm_err = LJM_eReadName(ljm_handle, "STREAM_SETTLING_US", &val);
printf("STREAM_SETTLING_US: %f\n", val);
sleep(5);
printf("Starting stream reads...\n");
execute = 1;
while(execute)
{
usleep(1000);
//ljm_err = LJM_eReadAddress(ljm_handle, 0, 3, &val);
//handleError(ljm_err, "LJM_eReadAddress");
//printf("DIO0: %f\n", val);
ljm_err = LJM_eStreamRead(ljm_handle, ljm_data,
&ljm_dev_scan_backlog,
&ljm_scan_backlog);
handleError(ljm_err, "LJM_eStreamRead");
if(ljm_err == LJME_NO_SCANS_RETURNED)
{
continue;
}
printf("Device backlog: %d Library backlog: %d\n",
ljm_dev_scan_backlog, ljm_scan_backlog);
printf("Channels: [ ");
for(i=0; i<NUM_CHANNELS; i++)
{
printf("%f ", ljm_data[i]);
}
printf("]\n\n");
}
printf("Stopping stream...\n");
ljm_err = LJM_eStreamStop(ljm_handle);
handleError(ljm_err, "LJM_eStreamStop");
printf("Closing connection...\n");
ljm_err = LJM_Close(ljm_handle);
handleError(ljm_err, "LJM_Close");
return 0;
}
This is the output I get when running the above code on the ARM system:
$ ./t7-test
Opening configuration files...
Getting information of all connected devices...
Found 1 device(s)
TYPE SERIAL IP
7 470013664 0
Converting channel names to addresses...
Addresses: [ 0 2 4 6 ]
Types: [ 3 3 3 3 ]
Opening device...
Setting device to non-blocking mode...
Initializing stream...
LJM ERROR IN LJM_eStreamStart: STREAM_SETTLING_INVALID
Requested scan rate: 20 Actual scan rate: 20.000000
STREAM_SETTLING_US: 0.000000
Starting stream reads...
LJM ERROR IN LJM_eStreamRead: LJME_NO_SCANS_RETURNED
LJM ERROR IN LJM_eStreamRead: LJME_NO_SCANS_RETURNED
Is there some difference between the x86 and ARM6 libraries that requires different setup for streaming mode?
LJM library usage on ARM and x86 is the same.
To start troubleshooting, before you start stream mode set the STREAM_SETTLING_US setting and see if that helps. Valid values are 0 to 4400, and 0 is default. For example:
//Set stream settling to default
ljm_err = LJM_eWriteName(ljm_handle, "STREAM_SETTLING_US", 0);
//Check for error, other configuration and start stream
This is in case another application/code set it to something invalid, and is causing the error you are seeing.
Thanks for your reply. I managed to solve the problem by setting STREAM_SETTLING_US to a non-zero value (such as 10). For some reason, it fails if it is set to 0. I'm not sure why, but I consider this problem solved, thank you.
When settling is set to zero the T7 will attempt to determine the best settling value based on resolution and gain settings. STREAM_SETTLING_INVALID means that the amount of settling selected by the automatic system requires more settling time than stream's maximum of 4.4 ms. Check the gain settings on AIN0-3 and check your stream's resolution setting.
Also, it is a good idea to set the channel specific gain (range) settings when configuring stream.