Using C or C++ (mbed, Arduino, etc.), is there a trick up C's sleeve for setting an ON time and an OFF time over a 24-hour period? For instance, 'ON' at 20:00 hours and 'OFF' at 06:30 hours the following morning.
Timers are no good here if there is an NVIC reset. If the device does fall over and restarts at, say, 23:40 hours, we still need to service that 20:00 to 06:30 time frame.
I'm stuck on the window going past midnight.
I've got this far using seconds, but it's not quite working, and I suspect I'm barking up the wrong tree, so I would appreciate some clever input here.
lockStatus = 1 is 'ON'
lockStatus = 0 is 'OFF'
void autoLOCK()
{
  int hour_from, minute_from = 0;
  int seconds_from = 0;
  int hour_to, minute_to = 0;
  int seconds_to = 0;

  lockFrom = "20:00";
  lockTo = "06:30";

  if (sscanf(lockFrom, "%d:%d", &hour_from, &minute_from) >= 2)
  {
    seconds_from = (hour_from * 3600 + minute_from * 60);
  }
  if (sscanf(lockTo, "%d:%d", &hour_to, &minute_to) >= 2)
  {
    seconds_to = (hour_to * 3600 + minute_to * 60);
  }

  lockStatus = 0;
  if (seconds_now >= seconds_from) {
    lockStatus = 1;
  }
  if (seconds_from > seconds_to) {
    lockStatus = 1;
  }
  if (seconds_now >= seconds_to && seconds_from >= seconds_to) {
    lockStatus = 0;
  }
  Serial.printf("Lock Status: %d\n\n", lockStatus);
}
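For reference, the usual way to express a window that may wrap past midnight is a single two-case comparison on seconds-since-midnight. A minimal sketch (assuming seconds_now, seconds_from and seconds_to are seconds since local midnight, as above; the function name inLockWindow is only illustrative):
// Sketch only: returns 1 if 'now' falls inside the lock window, 0 otherwise.
// Handles both a same-day window (e.g. 08:00-17:00) and one that wraps midnight (20:00-06:30).
int inLockWindow(long seconds_now, long seconds_from, long seconds_to)
{
  if (seconds_from <= seconds_to) {
    // Window does not cross midnight
    return (seconds_now >= seconds_from && seconds_now < seconds_to) ? 1 : 0;
  }
  // Window crosses midnight
  return (seconds_now >= seconds_from || seconds_now < seconds_to) ? 1 : 0;
}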
In my case I get the time in this format: PT2H3M20S. I have no idea about the regex expression (using Dart), so I just want to know how to calculate milliseconds from the above format. Thanks in advance.
Future<http.Response> getVideoDuration({var videoUri}) async {
  // print(videoUri);
  final BI_YT_API_KEY = "some_API";
  var lArr = videoUri.split('/');
  var lId = lArr[lArr.length - 1];
  var data = await http.get('https://www.googleapis.com/youtube/v3/videos' +
      "?id=$lId&part=contentDetails&key=$BI_YT_API_KEY");
  if (data.statusCode == 200) {
    var jom = json.decode(data.body);
    print(jom['items'][0]['contentDetails']['duration']);
    var duration = data.body[0];
  }
}
Took some time, but finally done.
You can use it like this:
int seconds = convertTime("PT1H11S");
Here, seconds will be the converted duration in seconds. So, for PT1H11S, the answer will be 3611, because 1 hour == 3600 seconds, plus 11 seconds.
int convertTime(String duration) {
  RegExp regex = new RegExp(r'(\d+)');
  List<String> a = regex.allMatches(duration).map((e) => e.group(0)!).toList();
  // Pad the matches out to [hours, minutes, seconds] depending on which
  // designators appear in the string.
  if (duration.indexOf('M') >= 0 &&
      duration.indexOf('H') == -1 &&
      duration.indexOf('S') == -1) {
    a = ["0", a[0], "0"]; // minutes only, e.g. PT3M
  }
  if (duration.indexOf('H') >= 0 &&
      duration.indexOf('M') == -1 &&
      duration.indexOf('S') == -1) {
    a = [a[0], "0", "0"]; // hours only, e.g. PT2H (checked before the next case so a[1] is never missing)
  } else if (duration.indexOf('H') >= 0 && duration.indexOf('M') == -1) {
    a = [a[0], "0", a[1]]; // hours and seconds, e.g. PT1H11S
  }
  int seconds = 0;
  if (a.length == 3) {
    seconds = seconds + int.parse(a[0]) * 3600;
    seconds = seconds + int.parse(a[1]) * 60;
    seconds = seconds + int.parse(a[2]);
  }
  if (a.length == 2) {
    seconds = seconds + int.parse(a[0]) * 60;
    seconds = seconds + int.parse(a[1]);
  }
  if (a.length == 1) {
    seconds = seconds + int.parse(a[0]);
  }
  return seconds;
}
I also managed to get the duration in seconds using Dart (in case someone needs it).
/// For duration = 2H1M48S
int converToSeconds(String duration) {
  var hour = "", minute = "", seconds = "";
  var tempList = duration.split('');

  /// HOUR
  if (tempList.contains('H')) {
    var ind = tempList.indexOf('H');
    for (int i = 0; i < ind; i++) {
      hour = hour + tempList[i];
    }
    tempList.removeRange(0, ind + 1);
  }

  /// MINUTES
  if (tempList.contains('M')) {
    var ind = tempList.indexOf('M');
    for (int i = 0; i < ind; i++) {
      minute = minute + tempList[i];
    }
    tempList.removeRange(0, ind + 1);
  }

  /// SECONDS
  if (tempList.contains('S')) {
    var ind = tempList.indexOf('S');
    for (int i = 0; i < ind; i++) {
      seconds = seconds + tempList[i];
    }
    tempList.removeRange(0, ind + 1);
  }

  /// CONVERT TO INT
  hour = hour != "" ? hour : '0';
  seconds = seconds != "" ? seconds : '0';
  minute = minute != "" ? minute : '0';
  var ms = ((int.parse(hour) * 3600 + int.parse(minute) * 60) + int.parse(seconds)); // total in seconds
  return ms;
}
Note: I do not know Flutter, but I have heard that a Flutter developer should be able to use Java code. This answer is based on Java.
tl;dr
With Java, all you need is:
Duration.parse("PT2H3M20S").toMillis()
java.time.Duration is modelled on ISO-8601 standards and was introduced with Java 8 as part of the JSR-310 implementation.
If you have gone through the links above, you may have already noticed that PT2H3M20S specifies a duration of 2 hours, 3 minutes and 20 seconds, which you can parse into a Duration object and then convert into milliseconds.
Demo:
import java.time.Duration;

public class Main {
    public static void main(String[] args) {
        String strIso8601Duration = "PT2H3M20S";
        Duration duration = Duration.parse(strIso8601Duration);
        long millis = duration.toMillis();
        System.out.println(millis);
    }
}
Output:
7400000
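(As a quick check: 2 h = 7,200,000 ms, 3 min = 180,000 ms and 20 s = 20,000 ms, which sum to 7,400,000 ms.)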
Learn more about the modern date-time API* from Trail: Date Time.
* If, for any reason, you have to stick to Java 6 or Java 7, you can use ThreeTen-Backport, which backports most of the java.time functionality to Java 6 & 7. If you are working on an Android project and your Android API level is still not compliant with Java 8, check Java 8+ APIs available through desugaring and How to use ThreeTenABP in Android Project.
I have a simple loop:
LARGE_INTEGER ticks_per_second;
::QueryPerformanceFrequency(&ticks_per_second);

MSG msg = { 0 };
while (true)
{
    if (msg.message == WM_QUIT)
        exit(0);

    if (::PeekMessageW(&msg, NULL, 0U, 0U, PM_REMOVE))
    {
        ::TranslateMessage(&msg);
        ::DispatchMessageW(&msg);
        continue;
    }

    static double last_time_s = 0;

    LARGE_INTEGER cur_time_li;
    ::QueryPerformanceCounter(&cur_time_li);

    double cur_time_s = (double)cur_time_li.QuadPart / (double)ticks_per_second.QuadPart;
    double diff_s = cur_time_s - last_time_s;
    double rate_s = 1 / 30.0f;

    uint32_t slept_ms = 0;
    if (diff_s < rate_s)
    {
        slept_ms = (uint32_t)((rate_s - diff_s) * 1000.0);
        ::Sleep(slept_ms);
    }

    update();
    ::printf("updated %f %u\n", diff_s, slept_ms);

    last_time_s = cur_time_s;
}
I want update() to be called 30 times per second, but not more often.
With this code it goes wrong; in the console I get something like this:
updated 0.031747 1
updated 0.001997 31
updated 0.031912 1
updated 0.001931 31
updated 0.031442 1
updated 0.002084 31
This seems to be correct only for the first update; the second one is called too fast, and I can't understand why.
I understand that update(), PeekMessageW, etc. also take time, but even if I reduce it to a bare while (true) loop and comment update() out, it still prints a similar result.
I'm using DirectX 11 with vsync turned off for rendering (rendering happens inside the update function):
g_pSwapChain->Present(0, 0);
How do I fix the code so that update() is called a stable 30 times per second?
I don't think casting to double is a good idea. I would run something like this:
static LARGE_INTEGER last_time_s = { 0 };   // holds a raw QPC tick count despite the name

LARGE_INTEGER cur_time_li;
LARGE_INTEGER time_diff_microsec;
::QueryPerformanceCounter(&cur_time_li);
time_diff_microsec.QuadPart = cur_time_li.QuadPart - last_time_s.QuadPart;
// To avoid precision loss, convert to microseconds *before* dividing by ticks-per-second.
time_diff_microsec.QuadPart *= 1000000;
time_diff_microsec.QuadPart /= ticks_per_second.QuadPart;

const LONGLONG rate_us = 1000000 / 30;   // target frame time, in integer microseconds
uint32_t slept_ms = 0;
if (time_diff_microsec.QuadPart >= rate_us)   // note: condition inverted vs. the question's (diff_s < rate_s)
{
    // Enough time has passed for one frame; no Sleep() needed in this variant.
    // update();
    ::printf("updated %lld %u\n", time_diff_microsec.QuadPart, slept_ms);
    last_time_s = cur_time_li;   // remember this frame's tick count, not the difference
}
Just a brief "sketch". I have not verified that the calculations are correct, though.
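For completeness, here is a minimal sketch (not the answerer's code; the message pump from the question is omitted) of the other common fix: pace against an absolute deadline, so that Sleep() jitter cannot produce the long/short alternation seen in the output above:
// Sketch only: fixed 30 Hz pacing against an absolute QPC deadline.
LARGE_INTEGER freq, now;
::QueryPerformanceFrequency(&freq);
::QueryPerformanceCounter(&now);

const LONGLONG ticks_per_frame = freq.QuadPart / 30;     // ~33.3 ms worth of ticks
LONGLONG next_deadline = now.QuadPart + ticks_per_frame;

while (true)
{
    ::QueryPerformanceCounter(&now);
    if (now.QuadPart < next_deadline)
    {
        // Sleep most of the remaining time (Sleep() granularity is only ~1-15 ms).
        DWORD ms = (DWORD)((next_deadline - now.QuadPart) * 1000 / freq.QuadPart);
        if (ms > 0)
            ::Sleep(ms);
    }
    update();
    next_deadline += ticks_per_frame;   // advance by a fixed step, so errors never accumulate
}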
So I'm trying to create an energy meter device, using an MKR 1300 Arduino, which reads power every minute and then sends it every 5 minutes through a LoRa server. The problem is that the hardware is currently losing a few milliseconds from the delay, so the timestamps on the server end up being, e.g.:
10:50:30
10:50:30
10:50:30
... 2 hours later
10:50:29
10:50:29
...
10:49:59
The code looks like this:
#include <MKRWAN.h>
#include "EmonLib.h"

LoRaModem modem;

String appEui = "1234567891011121";
String appKey = "ffffffffffffffffffffffffffffffff";

EnergyMonitor emon1;
EnergyMonitor emon2;
EnergyMonitor emon3;

double totalWatt;
int time_running;
int sending;
int totalKW;
int DELAY = 60000; // millis

void setup() {
  Serial.begin(115200);
  if (!modem.begin(EU868)) {
    Serial.println("Failed to start module");
    while (1) {}
  };
  Serial.print("Your module version is: ");
  Serial.println(modem.version());
  Serial.print("Your device EUI is: ");
  Serial.println(modem.deviceEUI());

  Serial.println("Connecting");
  int connected = modem.joinOTAA(appEui, appKey);
  if (!connected) {
    Serial.println("Something went wrong; are you indoor? Move near a window and retry");
    while (1) {}
  }
  Serial.println("Connected");

  modem.minPollInterval(60);
  analogReadResolution(9);

  emon1.current(1, 53);
  emon2.current(2, 53);
  emon3.current(3, 53);

  time_running = 0;
  randomSeed(analogRead(A4));
}

void loop() {
  unsigned long StartTime = millis();
  totalWatt = 0;
  unsigned long delay_send = 0;
  int sending = 0;

  double Irms1 = emon1.calcIrms(600);
  if (Irms1 < 0.3) Irms1 = 0;
  double Watt1 = Irms1 * 230;

  double Irms2 = emon2.calcIrms(600);
  if (Irms2 < 0.3) Irms2 = 0;
  double Watt2 = Irms2 * 230;

  double Irms3 = emon3.calcIrms(600);
  if (Irms3 < 0.3) Irms3 = 0;
  double Watt3 = Irms3 * 230;

  totalWatt = Watt1 + Watt2 + Watt3;
  totalKW = totalKW + totalWatt / 1000;

  if (time_running == 5) { // "15 para 15 mins" = every 15 minutes
    double IrmsTotal = Irms1 + Irms2 + Irms3;
    String msg = "{\"id\":\"avac_aud1\",\"kW\":" + String(totalKW) + ", \"current\":" + String(IrmsTotal) + "}";
    int err;
    modem.beginPacket();
    modem.print(msg);
    err = modem.endPacket(true);
    if (err > 0) {
      // message sent correctly
      time_running = 0;
      totalKW = 0;
    } else {
      Serial.println("ERR");
      time_running = 0;
    }
  }

  time_running = time_running + 1;

  if ((millis() - StartTime) > DELAY) {
    delay(10);
    return;
  } else {
    delay(DELAY - (millis() - StartTime));
    return;
  }
}
I tried adding a variable ARD_DELAY (not shown above) that would subtract 7 to 8 milliseconds from that last delay to try to fix this, but apparently it only made things worse (now it loses 1 second every hour instead of every 2 hours), so today I'll try adding those 7 to 8 millis instead and see if that works. But I would really like to know why this is happening, because from what I can see in my code the delay should always account for the processing time, including the time spent sending the data.
The question is, how precise is your clock in the first place...
Still, I personally would rather go with the following approach:
#define DELAY (5UL * 60UL * 1000UL) // or whatever is appropriate...

static unsigned long timestamp = millis();
if (millis() - timestamp > DELAY)
{
    // adding a fixed constant prevents deviations from accumulating over time
    timestamp += DELAY;
    // run the every-5-min task...
}
Edit: combined 1-minute and 5-minute tasks:
Variant 1:
#define DELAY_SHORT (1UL * 60UL * 1000UL)
#define DELAY_LONG  (5UL * 60UL * 1000UL)

static unsigned long timestampS = millis();
static unsigned long timestampL = timestampS;

if (millis() - timestampS > DELAY_SHORT)
{
    timestampS += DELAY_SHORT;
    // run the every-1-min task...
}
if (millis() - timestampL > DELAY_LONG)
{
    timestampL += DELAY_LONG;
    // run the every-5-min task...
}
Variant 2:
#define DELAY_1M (1UL * 60UL * 1000UL)

static unsigned long timestamp = millis();
if (millis() - timestamp > DELAY_1M)
{
    // adding a fixed constant prevents deviations from accumulating over time
    timestamp += DELAY_1M;
    // run the every-1-min task...

    static unsigned int counter = 0;
    if (++counter == 5)
    {
        counter = 0;
        // run the every-5-min task...
    }
}
Instead of trying to measure a start time and adding delay depending on that, you could keep track of the timing for your next cycle.
unsigned long next_cycle = DELAY;
...
void loop() {
    ...
    delay( next_cycle - millis() );
    next_cycle += DELAY;
}
If you also want to adjust for any time the program spends on initialization or similar, you can set next_cycle = millis() + DELAY; before you enter your loop.
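A slightly fuller sketch of the same millis()-based scheduling, applied to the question's 1-minute/5-minute split; measurePower() and sendPacket() are hypothetical placeholders, not the asker's functions:
// Sketch only: drift-free scheduling without blocking in delay() for a whole minute.
const unsigned long MEASURE_MS = 60UL * 1000UL;   // one reading per minute
unsigned long next_measure;
unsigned int minuteCount = 0;

void setup() {
  // ... sensor and modem setup as in the question ...
  next_measure = millis() + MEASURE_MS;
}

void loop() {
  if ((long)(millis() - next_measure) >= 0) {
    next_measure += MEASURE_MS;   // advance by a fixed step, so errors never accumulate
    measurePower();               // hypothetical: take the 1-minute reading
    if (++minuteCount == 5) {
      minuteCount = 0;
      sendPacket();               // hypothetical: send the accumulated data over LoRa
    }
  }
  // other non-blocking work can run here
}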
I am building a spatial octree. In order to determine in which branch/octant a certain point (x, y, z) should be placed, I use this function:
if (x > x_centre) {
    xsign = 1;
}
else {
    xsign = 0;
}
if (y > y_centre) {
    ysign = 1;
}
else {
    ysign = 0;
}
if (z > z_centre) {
    zsign = 1;
}
else {
    zsign = 0;
}
return xsign + 2*ysign + 4*zsign;
It returns a number between 0 and 7 that is unique for every octant. It turns out this snippet is called a great many times, and it gets quite time consuming when building large trees.
Is there any easy way to speed this process up?
This already gives a 30 percent speed-up:
xsign = x>x_centre;
ysign = y>y_centre;
zsign = z>z_centre;
return xsign + 2*ysign + 4*zsign;
Any other tips?
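For what it's worth, the same index can also be written with shifts and ORs; a minimal sketch (the function name octant is only illustrative). Note that compilers often emit comparable branchless code for both forms, so it is worth measuring before assuming a win:
// Sketch: octant index from three comparisons, using shifts instead of multiplies.
inline int octant(double x, double y, double z,
                  double x_centre, double y_centre, double z_centre)
{
    return (int)(x > x_centre)
         | ((int)(y > y_centre) << 1)
         | ((int)(z > z_centre) << 2);
}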
I'm curious whether there's any way to find out the UTC date/time at which the next Daylight Saving adjustment will take place?
Something akin to what Windows reports in its Date and Time dialog.
This information is provided in Windows by the EnumDynamicTimeZoneInformation function.
See http://msdn.microsoft.com/en-us/library/windows/desktop/hh706893%28v=vs.85%29.aspx
There is a database that has code and data: http://www.iana.org/time-zones
I don't think there's a specific API for this. I would just do a binary search, using localtime (and maybe time and mktime) from <ctime> (C++) or <time.h> (C).
A basic approach is to scan ahead three months at a time until the tm_isdst flag in the returned data structure is flipped. Then you can start binary searching between the last two dates to figure out exactly when it flips.
See http://www.cplusplus.com/reference/ctime/tm/ for reference material.
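A rough sketch of that idea in C++ (purely illustrative, not a tested implementation; it steps one day at a time rather than three months, for simplicity, and the function name next_dst_change is made up):
#include <ctime>

// Sketch: approximate the next DST transition after 'from' by scanning ahead
// and then bisecting on the tm_isdst flag. Returns (time_t)-1 if no flip is found.
time_t next_dst_change(time_t from)
{
    std::tm tmv = *std::localtime(&from);
    const int start_isdst = tmv.tm_isdst;

    const time_t step = 24 * 60 * 60;          // coarse scan: one day at a time
    time_t lo = from, hi = from;
    bool flipped = false;
    for (int i = 0; i < 2 * 366; ++i) {        // give up after ~2 years
        hi += step;
        tmv = *std::localtime(&hi);
        if (tmv.tm_isdst != start_isdst) { flipped = true; break; }
        lo = hi;
    }
    if (!flipped)
        return (time_t)-1;                     // e.g. DST not observed in this zone

    while (hi - lo > 1) {                      // binary search down to 1 second
        time_t mid = lo + (hi - lo) / 2;
        tmv = *std::localtime(&mid);
        if (tmv.tm_isdst == start_isdst)
            lo = mid;
        else
            hi = mid;
    }
    return hi;                                 // first second on the other side of the change
}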
I appreciate all your replies. And, yes, indeed I was asking about a WinAPI for Windows.
I did more research and came up with the following method that does what I wanted. It uses C++ and MFC's COleDateTime for easier date/time calculations. Other than that it's just C++ and WinAPIs. Please check if I understood the documentation for the DYNAMIC_TIME_ZONE_INFORMATION correctly. Here's the code:
int GetNextDaylightSavingAdjustmentTime(SYSTEMTIME* pOutDtNextDST_Local, int* pnOutAdjustmentMin)
{
    //Get next time when DST adjustment will take place
    //'pOutDtNextDST_Local' = if not NULL, receives the (local) time when next DST adjustment will take place
    //'pnOutAdjustmentMin' = if not NULL, receives the amount of adjustment in minutes
    //RETURN:
    //      = 1 if got the time, or
    //      = 0 if DST is not used
    //      = -1 if error (check GetLastError() for info)
    int nOSError = NO_ERROR;

    //Load API dynamically (in case of Windows XP)
    DWORD (WINAPI *pfnGetDynamicTimeZoneInformation)(PDYNAMIC_TIME_ZONE_INFORMATION);
    (FARPROC&)pfnGetDynamicTimeZoneInformation =
        ::GetProcAddress(::GetModuleHandle(L"Kernel32.dll"), "GetDynamicTimeZoneInformation");

    DWORD tzID;
    SYSTEMTIME StandardDate;
    SYSTEMTIME DaylightDate;
    int nBiasDaylight;

    //Use newer API if possible
    if(pfnGetDynamicTimeZoneInformation)
    {
        DYNAMIC_TIME_ZONE_INFORMATION dtzi = {0};
        tzID = pfnGetDynamicTimeZoneInformation(&dtzi);
        StandardDate = dtzi.StandardDate;
        DaylightDate = dtzi.DaylightDate;
        nBiasDaylight = dtzi.DaylightBias;
    }
    else
    {
        //Older API
        TIME_ZONE_INFORMATION tzi = {0};
        tzID = GetTimeZoneInformation(&tzi);
        StandardDate = tzi.StandardDate;
        DaylightDate = tzi.DaylightDate;
        nBiasDaylight = tzi.DaylightBias;
    }

    int nRes = -1;
    int nAdjMins = 0;
    SYSTEMTIME stDstChange;
    memset(&stDstChange, 0, sizeof(stDstChange));

    SYSTEMTIME stDst;
    if(tzID == TIME_ZONE_ID_STANDARD ||
        tzID == TIME_ZONE_ID_DAYLIGHT)
    {
        stDst = tzID != TIME_ZONE_ID_DAYLIGHT ? DaylightDate : StandardDate;
        if(stDst.wMonth >= 1 &&
            stDst.wMonth <= 12 &&
            stDst.wDay >= 1 &&
            stDst.wDayOfWeek >= 0 &&
            stDst.wDayOfWeek <= 6)
        {
            //Get adjustment bias
            nAdjMins = tzID != TIME_ZONE_ID_DAYLIGHT ? -nBiasDaylight : nBiasDaylight;

            if(stDst.wYear == 0)
            {
                //Relative date
                SYSTEMTIME stLocal;
                ::GetLocalTime(&stLocal);

                //Begin from the 1st day of the month &
                //make sure that the date is in the future
                COleDateTime dt;
                for(int nYear = stLocal.wYear;; nYear++)
                {
                    dt.SetDateTime(nYear, stDst.wMonth, 1, stDst.wHour, stDst.wMinute, stDst.wSecond);
                    if(dt > COleDateTime::GetCurrentTime())
                        break;
                }

                int nRequiredWeek = stDst.wDay >= 1 && stDst.wDay <= 5 ? stDst.wDay : 5;
                for(int nCntDOW = 1;;)
                {
                    //0=Sunday, 1=Monday; 2=Tuesday; 3=Wednesday; 4=Thursday; 5=Friday; 6=Saturday
                    int dow = dt.GetDayOfWeek() - 1;
                    ASSERT(dow >= 0 && dow <= 6);

                    if(dow == stDst.wDayOfWeek)
                    {
                        if(nCntDOW >= nRequiredWeek)
                        {
                            //Stop
                            break;
                        }
                        else
                        {
                            nCntDOW++;
                        }
                    }

                    //Go to next day
                    dt += COleDateTimeSpan(1, 0, 0, 0);
                }

                //Convert back to system time
                if(dt.GetAsSystemTime(stDstChange))
                {
                    //Success
                    nRes = 1;
                }
                else
                {
                    //Failed
                    nOSError = ERROR_INVALID_FUNCTION;
                    ASSERT(NULL);
                }
            }
            else
            {
                //Absolute date
                stDstChange = stDst;
                nRes = 1;
            }
        }
        else
        {
            //Failed
            nOSError = ERROR_INVALID_PARAMETER;
            ASSERT(NULL);
        }
    }
    else
    {
        //DST is not used
        if(tzID == TIME_ZONE_ID_UNKNOWN)
        {
            nRes = 0;
        }
        else
        {
            //Error
            nOSError = ERROR_INVALID_DATA;
            ASSERT(NULL);
        }
    }

    if(pOutDtNextDST_Local)
        *pOutDtNextDST_Local = stDstChange;
    if(pnOutAdjustmentMin)
        *pnOutAdjustmentMin = nAdjMins;

    ::SetLastError(nOSError);
    return nRes;
}
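A hypothetical usage example for the function above (the caller and its output formatting are illustrative only, not part of the original answer):
// Hypothetical caller of GetNextDaylightSavingAdjustmentTime()
SYSTEMTIME stNext = {};
int nAdjMin = 0;
int nRes = GetNextDaylightSavingAdjustmentTime(&stNext, &nAdjMin);
if(nRes == 1)
    wprintf(L"Next DST change (local): %04u-%02u-%02u %02u:%02u, adjustment: %d min\n",
        stNext.wYear, stNext.wMonth, stNext.wDay, stNext.wHour, stNext.wMinute, nAdjMin);
else if(nRes == 0)
    wprintf(L"DST is not used in this time zone\n");
else
    wprintf(L"Error: %lu\n", ::GetLastError());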
PS. And scratch my request for the UTC time. As I learned, it is easier to deal with local time in this situation.