Writing to filesystem, not console - c#-5.0

I want to write all the bank account numbers to an actual file instead of the console, but I don't see any files appearing in the file system. How do I write the numbers to the file system?
I have this:
static void Main(string[] args)
{
string path = @"G:\Folder";
//string fileName = string.Format("{0}{1}-", part[0], part[part.Count - 1]);
var bans = BankAcoutNumbers.BANS;
const int MAX_FILES = 10;
const int BANS_PER_FILE = 10;
int bansCounter = 0;
var part = new List<int>();
//string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
var maxNumberOfFiles = 10;
Stopwatch timer = new Stopwatch();
var fileCounter = 0;
if (!Directory.Exists(path))
{
DirectoryInfo di = Directory.CreateDirectory(path);
}
//var destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
try
{
// foreach (var bank in BankAcoutNumbers.BANS.Take(100))
//{
while (fileCounter <= maxNumberOfFiles)
{
timer.Start();
foreach (var bank in BankAcoutNumbers.BANS)
{
part.Add(bank);
if(++bansCounter >= BANS_PER_FILE)
{
string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
//var destinationFile = new StreamWriter(fileName);
//destiNationFile = new StreamWriter(fileName);
Console.WriteLine("NR{0}", fileName);
foreach (var partBan in part )
Console.WriteLine(partBan);
part.Clear();
bansCounter = 0;
if (++fileCounter >= MAX_FILES)
break;
//lineCounter = 0;
//destiNationFile.Flush();
//destiNationFile.Dispose();
//destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
//fileCounter++;
}
//destiNationFile.WriteLine(bank);
//lineCounter++;
}
//fileCounter++;
//}
}
timer.Stop();
Console.WriteLine(timer.Elapsed.Seconds);
}
catch (Exception)
{
throw;
}
// Keep the console window open in debug mode.
System.Console.WriteLine("Press any key to exit.");
System.Console.ReadKey();
}
Thank you
public class Program
{
//BankAcoutNumbers bankAccountNumbers = new BankAcoutNumbers();
static void Main(string[] args)
{
string path = @"G:\Folder";
//string fileName = string.Format("{0}{1}-", part[0], part[part.Count - 1]);
var bans = BankAcoutNumbers.BANS;
const int MAX_FILES = 10;
const int BANS_PER_FILE = 10;
int bansCounter = 0;
var part = new List<int>();
//part.Add(456456465);
//string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
var maxNumberOfFiles = 10;
Stopwatch timer = new Stopwatch();
var fileCounter = 0;
if (!Directory.Exists(path))
{
DirectoryInfo di = Directory.CreateDirectory(path);
}
//var destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
try
{
// foreach (var bank in BankAcoutNumbers.BANS.Take(100))
//{
while (fileCounter <= maxNumberOfFiles)
{
timer.Start();
foreach (var bank in BankAcoutNumbers.BANS)
{
part.Add(bank);
if(++bansCounter >= BANS_PER_FILE)
{
string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
//var destinationFile = new StreamWriter(fileName);
//destiNationFile = new StreamWriter(fileName);
Console.WriteLine("NR{0}", fileName);
fileName = @"G:\\Folder" + fileName;
foreach (var partBan in part)
{
Console.WriteLine(partBan);
System.IO.File.WriteAllText(fileName, partBan.ToString());
}
part.Clear();
bansCounter = 0;
if (++fileCounter >= MAX_FILES)
break;
//lineCounter = 0;
//destiNationFile.Flush();
//destiNationFile.Dispose();
//destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
//fileCounter++;
}
//destiNationFile.WriteLine(bank);
//lineCounter++;
}
//fileCounter++;
//}
}
timer.Stop();
Console.WriteLine(timer.Elapsed.Seconds);
}
catch (Exception)
{
throw;
}
// Keep the console window open in debug mode.
System.Console.WriteLine("Press any key to exit.");
System.Console.ReadKey();
}

Try this code.
Also, please note that for demonstration purposes I made a class BankAcoutNumbers which has the member BANS:
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
namespace ConsoleApplication1
{
class BankAcoutNumbers
{
public List<int> BANS { get; set; }
public BankAcoutNumbers()
{
BANS = new List<int>();
BANS.Add(1456456465);
BANS.Add(2456465);
BANS.Add(342346465);
BANS.Add(445645646);
BANS.Add(545636546);
BANS.Add(64556465);
BANS.Add(7456465);
BANS.Add(842346465);
BANS.Add(9456456);
BANS.Add(10456365);
BANS.Add(11456456);
BANS.Add(12456465);
BANS.Add(1342346);
BANS.Add(1445645);
BANS.Add(1545636);
BANS.Add(1645645);
BANS.Add(1745646);
BANS.Add(1842345);
BANS.Add(194564);
BANS.Add(2045635);
BANS.Add(214564);
BANS.Add(224564);
BANS.Add(234234);
BANS.Add(244564);
BANS.Add(254563);
}
}
class Program
{
static void Main(string[] args)
{
string path = @"C:\";
//string fileName = string.Format("{0}{1}-", part[0], part[part.Count - 1]);
//var bans = BankAcoutNumbers.BANS;
const int MAX_FILES = 10;
const int BANS_PER_FILE = 10;
int bansCounter = 0;
var part = new List<int>();
//string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
var maxNumberOfFiles = 10;
Stopwatch timer = new Stopwatch();
var fileCounter = 0;
if (!Directory.Exists(path))
{
DirectoryInfo di = Directory.CreateDirectory(path);
}
//var destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
try
{
// foreach (var bank in BankAcoutNumbers.BANS.Take(100))
//{
while (fileCounter <= maxNumberOfFiles)
{
timer.Start();
foreach (var bank in new BankAcoutNumbers().BANS)
{
part.Add(bank);
if (++bansCounter >= BANS_PER_FILE)
{
string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
string outputToFile = "";
//var destinationFile = new StreamWriter(fileName);
//destiNationFile = new StreamWriter(fileName);
Console.WriteLine("NR{0}", fileName);
fileName = @"C:\" + fileName;
foreach (var partBan in part)
{
outputToFile += partBan + Environment.NewLine;
Console.WriteLine(partBan);
}
System.IO.File.WriteAllText(fileName, outputToFile);
part.Clear();
bansCounter = 0;
if (++fileCounter >= MAX_FILES)
break;
}
}
}
timer.Stop();
Console.WriteLine(timer.Elapsed.Seconds);
}
catch (Exception)
{
throw;
}
// Keep the console window open in debug mode.
System.Console.WriteLine("Press any key to exit.");
System.Console.ReadKey();
}
}
}

Well.
I tried this and got the following files on drive C: 42346465-456456465, 456365465-456456465, 456456465-456365465, 456456465-456465, 456465-42346465. The content I sent was also inside the files.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ConsoleApplication1
{
class BankAcoutNumbers
{
public List<int> BANS { get; set; }
public BankAcoutNumbers()
{
BANS = new List<int>();
BANS.Add(456456465);
BANS.Add(456465);
BANS.Add(42346465);
BANS.Add(456456465);
BANS.Add(456365465);
}
}
class Program
{
static void Main(string[] args)
{
string path = @"C:\";
//string fileName = string.Format("{0}{1}-", part[0], part[part.Count - 1]);
//var bans = BankAcoutNumbers.BANS;
const int MAX_FILES = 10;
const int BANS_PER_FILE = 2;
int bansCounter = 0;
var part = new List<int>();
//string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
var maxNumberOfFiles = 10;
Stopwatch timer = new Stopwatch();
var fileCounter = 0;
if (!Directory.Exists(path))
{
DirectoryInfo di = Directory.CreateDirectory(path);
}
//var destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
try
{
// foreach (var bank in BankAcoutNumbers.BANS.Take(100))
//{
while (fileCounter <= maxNumberOfFiles)
{
timer.Start();
foreach (var bank in new BankAcoutNumbers().BANS)
{
part.Add(bank);
if (++bansCounter >= BANS_PER_FILE)
{
string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
//var destinationFile = new StreamWriter(fileName);
//destiNationFile = new StreamWriter(fileName);
Console.WriteLine("NR{0}", fileName);
fileName = @"C:\" + fileName;
foreach (var partBan in part)
{
Console.WriteLine(partBan);
System.IO.File.WriteAllText(fileName, partBan.ToString());
}
part.Clear();
bansCounter = 0;
if (++fileCounter >= MAX_FILES)
break;
//lineCounter = 0;
//destiNationFile.Flush();
//destiNationFile.Dispose();
//destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
//fileCounter++;
}
//destiNationFile.WriteLine(bank);
//lineCounter++;
}
//fileCounter++;
//}
}
timer.Stop();
Console.WriteLine(timer.Elapsed.Seconds);
}
catch (Exception)
{
throw;
}
// Keep the console window open in debug mode.
System.Console.WriteLine("Press any key to exit.");
System.Console.ReadKey();
}
}
}

https://msdn.microsoft.com/en-us/library/8bh11f1k.aspx has a nice function:
System.IO.File.WriteAllText(@"C:\Users\Public\TestFolder\WriteText.txt", text);
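For example, here is a minimal, self-contained sketch (the folder, file name and sample numbers are made up for illustration) that writes one group of numbers per file with WriteAllText, and the same thing with WriteAllLines:
using System;
using System.Collections.Generic;
using System.IO;

class WriteAllTextDemo
{
    static void Main()
    {
        // Illustrative folder and data only.
        string folder = @"C:\Users\Public\TestFolder";
        Directory.CreateDirectory(folder); // no-op if the folder already exists

        var numbers = new List<int> { 456456465, 456465, 42346465 };

        // Build the file name from the first and last number, like the code above.
        string fileName = Path.Combine(folder,
            string.Format("{0}-{1}.txt", numbers[0], numbers[numbers.Count - 1]));

        // A single call creates (or overwrites) the whole file.
        File.WriteAllText(fileName, string.Join(Environment.NewLine, numbers));

        // WriteAllLines does the same but takes the lines as a sequence.
        File.WriteAllLines(fileName, numbers.ConvertAll(n => n.ToString()));
    }
}
Note that both calls replace the file contents each time, so they should be called once per file with the full text, not once per number inside a loop.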

Related

Make xpdf Pdf2Txt function as thread safe

I have tried to use the xpdf source code in an MFC application to convert PDF to text. The code sample is taken from their site (or repository):
int Pdf2Txt(std::string PdfFile, std::string TxtFile) const
{
GString* ownerPW, *userPW;
UnicodeMap* uMap;
TextOutputDev* textOut;
TextOutputControl textOutControl;
GString* textFileName;
int exitCode;
char textEncName[128] = "";
char textEOL[16] = "";
GBool noPageBreaks = gFalse;
GBool quiet = gFalse;
char ownerPassword[33] = "\001";
char userPassword[33] = "\001";
int firstPage = 1;
int lastPage = 0;
GBool tableLayout = gFalse;
double fixedPitch = 0;
GBool physLayout = gFalse;
GBool simpleLayout = gFalse;
GBool simple2Layout = gFalse;
GBool linePrinter = gFalse;
GBool rawOrder = gFalse;
double fixedLineSpacing = 0;
double marginLeft = 0;
double marginRight = 0;
double marginTop = 0;
double marginBottom = 0;
GBool clipText = gFalse;
GBool discardDiag = gFalse;
GBool insertBOM = gFalse;
exitCode = 99;
// read config file
globalParams = new GlobalParams("");
if (textEncName[0])
{
globalParams->setTextEncoding(textEncName);
}
if (textEOL[0])
{
if (!globalParams->setTextEOL(textEOL))
{
fprintf(stderr, "Bad '-eol' value on command line\n");
}
}
if (noPageBreaks)
{
globalParams->setTextPageBreaks(gFalse);
}
if (quiet)
{
globalParams->setErrQuiet(quiet);
}
// Set UNICODE support
globalParams->setTextEncoding("UTF-8");
// get mapping to output encoding
if (!(uMap = globalParams->getTextEncoding()))
{
error(errConfig, -1, "Couldn't get text encoding");
goto err1;
}
// open PDF file
if (ownerPassword[0] != '\001')
{
ownerPW = new GString(ownerPassword);
}
else
{
ownerPW = NULL;
}
if (userPassword[0] != '\001')
{
userPW = new GString(userPassword);
}
else
{
userPW = NULL;
}
PDFDoc* doc = new PDFDoc((char*)PdfFile.c_str(), ownerPW, userPW);
if (userPW)
{
delete userPW;
}
if (ownerPW)
{
delete ownerPW;
}
if (! doc->isOk())
{
exitCode = 1;
goto err2;
}
// check for copy permission
if (! doc->okToCopy())
{
error(errNotAllowed, -1, "Copying of text from this document is not allowed.");
exitCode = 3;
goto err2;
}
// construct text file name
textFileName = new GString(TxtFile.c_str());
// get page range
if (firstPage < 1)
{
firstPage = 1;
}
if (lastPage < 1 || lastPage > doc->getNumPages())
{
lastPage = doc->getNumPages();
}
// write text file
if (tableLayout)
{
textOutControl.mode = textOutTableLayout;
textOutControl.fixedPitch = fixedPitch;
}
else if (physLayout)
{
textOutControl.mode = textOutPhysLayout;
textOutControl.fixedPitch = fixedPitch;
}
else if (simpleLayout)
{
textOutControl.mode = textOutSimpleLayout;
}
else if (simple2Layout)
{
textOutControl.mode = textOutSimple2Layout;
}
else if (linePrinter)
{
textOutControl.mode = textOutLinePrinter;
textOutControl.fixedPitch = fixedPitch;
textOutControl.fixedLineSpacing = fixedLineSpacing;
}
else if (rawOrder)
{
textOutControl.mode = textOutRawOrder;
}
else
{
textOutControl.mode = textOutReadingOrder;
}
textOutControl.clipText = clipText;
textOutControl.discardDiagonalText = discardDiag;
textOutControl.insertBOM = insertBOM;
textOutControl.marginLeft = marginLeft;
textOutControl.marginRight = marginRight;
textOutControl.marginTop = marginTop;
textOutControl.marginBottom = marginBottom;
textOut = new TextOutputDev(textFileName->getCString(), &textOutControl, gFalse, gTrue);
if (textOut->isOk())
{
doc->displayPages(textOut, firstPage, lastPage, 72, 72, 0, gFalse, gTrue, gFalse);
}
else
{
delete textOut;
exitCode = 2;
goto err3;
}
delete textOut;
exitCode = 0;
// clean up
err3:
delete textFileName;
err2:
delete doc;
// uMap->decRefCnt();
err1:
delete globalParams;
// check for memory leaks
Object::memCheck(stderr);
gMemReport(stderr);
return exitCode;
}
So far, so good. But this code isn't thread safe: if I try to run it from multi-threaded code, it crashes:
// TextOutputDev.cc
if (uMap->isUnicode())
{
lreLen = uMap->mapUnicode(0x202a, lre, sizeof(lre)); // <-- crash
Why? Because there is a variable, globalParams, which is deleted in the last lines of the function, and it is shared by all threads:
delete globalParams;
And globalParams is an extern global variable from GlobalParams.h (part of the xpdf code):
// xpdf/GlobalParams.h
// The global parameters object.
extern GlobalParams *globalParams;
How can I make this function thread safe? The "problem variable" is inside the xpdf source code, not in mine ...
P.S. To sum up: globalParams is declared in the xpdf code, and it is cleaned up in my (client) code.
The xpdf source code could be seen here:
https://github.com/jeroen/xpdf/blob/c2c946f517eb09cfd09d957e0f3b04d44bf6f827/src/poppler/GlobalParams.h
and
https://github.com/jeroen/xpdf/blob/c2c946f517eb09cfd09d957e0f3b04d44bf6f827/src/poppler/GlobalParams.cc
Try restructuring your code as shown below. I have moved the GlobalParams initialization code into a separate function. This function should be called once during initialization, or at least before starting the threads that call Pdf2Txt(). And of course the GlobalParams instance shouldn't be destroyed, because it can be used by multiple threads. It won't hurt your app to keep it in memory; it's a single object and not really large: it contains many int and bool member variables, which do not take up much space, and quite a few string* members (initially null or empty, I guess), so it's just a few KB at most.
bool InitGlobalParams()
{
UnicodeMap* uMap;
char textEncName[128] = "";
char textEOL[16] = "";
GBool noPageBreaks = gFalse;
GBool quiet = gFalse;
// read config file
globalParams = new GlobalParams(""); // <-- Maybe add some checking code here?
if (textEncName[0])
{
globalParams->setTextEncoding(textEncName);
}
if (textEOL[0])
{
if (!globalParams->setTextEOL(textEOL))
{
fprintf(stderr, "Bad '-eol' value on command line\n");
}
}
if (noPageBreaks)
{
globalParams->setTextPageBreaks(gFalse);
}
if (quiet)
{
globalParams->setErrQuiet(quiet);
}
// Set UNICODE support
globalParams->setTextEncoding("UTF-8");
// get mapping to output encoding
if (!(uMap = globalParams->getTextEncoding()))
{
error(errConfig, -1, "Couldn't get text encoding");
return false;
}
return true;
}
int Pdf2Txt(std::string PdfFile, std::string TxtFile) const
{
GString* ownerPW, *userPW;
TextOutputDev* textOut;
TextOutputControl textOutControl;
GString* textFileName;
int exitCode;
char ownerPassword[33] = "\001";
char userPassword[33] = "\001";
int firstPage = 1;
int lastPage = 0;
GBool tableLayout = gFalse;
double fixedPitch = 0;
GBool physLayout = gFalse;
GBool simpleLayout = gFalse;
GBool simple2Layout = gFalse;
GBool linePrinter = gFalse;
GBool rawOrder = gFalse;
double fixedLineSpacing = 0;
double marginLeft = 0;
double marginRight = 0;
double marginTop = 0;
double marginBottom = 0;
GBool clipText = gFalse;
GBool discardDiag = gFalse;
GBool insertBOM = gFalse;
exitCode = 99;
// open PDF file
if (ownerPassword[0] != '\001')
{
ownerPW = new GString(ownerPassword);
}
else
{
ownerPW = NULL;
}
if (userPassword[0] != '\001')
{
userPW = new GString(userPassword);
}
else
{
userPW = NULL;
}
PDFDoc* doc = new PDFDoc((char*)PdfFile.c_str(), ownerPW, userPW);
if (userPW)
{
delete userPW;
}
if (ownerPW)
{
delete ownerPW;
}
if (! doc->isOk())
{
exitCode = 1;
goto err2;
}
// check for copy permission
if (! doc->okToCopy())
{
error(errNotAllowed, -1, "Copying of text from this document is not allowed.");
exitCode = 3;
goto err2;
}
// construct text file name
textFileName = new GString(TxtFile.c_str());
// get page range
if (firstPage < 1)
{
firstPage = 1;
}
if (lastPage < 1 || lastPage > doc->getNumPages())
{
lastPage = doc->getNumPages();
}
// write text file
if (tableLayout)
{
textOutControl.mode = textOutTableLayout;
textOutControl.fixedPitch = fixedPitch;
}
else if (physLayout)
{
textOutControl.mode = textOutPhysLayout;
textOutControl.fixedPitch = fixedPitch;
}
else if (simpleLayout)
{
textOutControl.mode = textOutSimpleLayout;
}
else if (simple2Layout)
{
textOutControl.mode = textOutSimple2Layout;
}
else if (linePrinter)
{
textOutControl.mode = textOutLinePrinter;
textOutControl.fixedPitch = fixedPitch;
textOutControl.fixedLineSpacing = fixedLineSpacing;
}
else if (rawOrder)
{
textOutControl.mode = textOutRawOrder;
}
else
{
textOutControl.mode = textOutReadingOrder;
}
textOutControl.clipText = clipText;
textOutControl.discardDiagonalText = discardDiag;
textOutControl.insertBOM = insertBOM;
textOutControl.marginLeft = marginLeft;
textOutControl.marginRight = marginRight;
textOutControl.marginTop = marginTop;
textOutControl.marginBottom = marginBottom;
textOut = new TextOutputDev(textFileName->getCString(), &textOutControl, gFalse, gTrue);
if (textOut->isOk())
{
doc->displayPages(textOut, firstPage, lastPage, 72, 72, 0, gFalse, gTrue, gFalse);
}
else
{
delete textOut;
exitCode = 2;
goto err3;
}
delete textOut;
exitCode = 0;
// clean up
err3:
delete textFileName;
err2:
delete doc;
// uMap->decRefCnt();
err1:
// Do NOT delete the one and only GlobalParams instance!!!
//delete globalParams;
// check for memory leaks
Object::memCheck(stderr);
gMemReport(stderr);
return exitCode;
}
The above code may not even compile (I modified it in a text editor and haven't really tested it), so please make any changes that may be required. It is expected that the xpdf functions do not modify the globalParams object (it's "read-only" for them), so this code has a good chance of working. By the way, there is a #if MULTITHREADED directive in the GlobalParams class definition (GlobalParams.h) containing 3 mutex objects in its block. The implementation code (GlobalParams.cc) locks a mutex to access the GlobalParams members, so this may cause some threads to wait a little, although I can't tell how much (one would have to examine the code thoroughly, which is a small "project" in itself). You can try testing it.
Of course the concerns expressed by @KJ above still apply: running many such threads in parallel could overload the system (although I'm not sure whether xpdf uses multiple threads to process a single PDF; could someone clarify how that is configured?). Especially if you are running this on a server, do not allow too many concurrently-running conversions, as that may cause other processes to slow down. It may also cause an I/O bottleneck (disk and/or network), so experiment with a few threads initially and check how it scales up.

Issues with AWS Lambda function, invoke error on cloudwatch

The Lambda function's successes vs. errors:
Hey y'all, I've scoured the internet trying to find the issue with my Node.js function. The CloudWatch error is below. Any insight or suggestions would be massively helpful! The code runs, but there are some issues. I do not know where the /var/runtime/CallbackContext.js code is, which seems to be the source of the problem.
2019-12-11T16:34:24.328Z 0211ab2b-8fea-4391-9cc6-01f5c5244c63 ERROR Invoke Error
"errorType": "Error", " at _homogeneousError (/var/runtime/CallbackContext.js:13:12)",
" at postError (/var/runtime/CallbackContext.js:30:51)",
" at done (/var/runtime/CallbackContext.js:57:7)",
" at fail (/var/runtime/CallbackContext.js:69:7)",
" at Object.fail (/var/runtime/CallbackContext.js:105:16)",
" at /var/task/index.js:42:25",
" at IncomingMessage.<anonymous> (/var/task/index.js:444:13)",
" at IncomingMessage.emit (events.js:203:15)",
" at endReadableNT (_stream_readable.js:1143:12)",
" at process._tickCallback (internal/process/next_tick.js:63:19)"
]
Code:
It says the issue is at line 42:25, which is the context.fail line.
if (error) {
logFailure(error, failedItems);
context.fail(JSON.stringify(error));
} else {
console.log('Success: ' + JSON.stringify(success));
context.succeed('Success');
}
// v1.1.2
var https = require('https');
var zlib = require('zlib');
var crypto = require('crypto');
var endpoint = 'vpc-esdxl-hx2nubxsqn3wh4hqwmywyy2g6y.us-east-1.es.amazonaws.com';
// Set this to true if you want to debug why data isn't making it to
// your Elasticsearch cluster. This will enable logging of failed items
// to CloudWatch Logs.
var logFailedResponses = false;
exports.handler = function(input, context) {
// decode input from base64
var zippedInput = Buffer.from(input.awslogs.data, 'base64');
// decompress the input
zlib.gunzip(zippedInput, function(error, buffer) {
if (error) { context.fail(error); return; }
// parse the input from JSON
var awslogsData = JSON.parse(buffer.toString('utf8'));
// transform the input to Elasticsearch documents
var elasticsearchBulkData = transform(awslogsData);
// skip control messages
if (!elasticsearchBulkData) {
console.log('Received a control message');
context.succeed('Control message handled successfully');
return;
}
// post documents to the Amazon Elasticsearch Service
post(elasticsearchBulkData, function(error, success, statusCode, failedItems) {
console.log('Response: ' + JSON.stringify({
"statusCode": statusCode
}));
if (error) {
logFailure(error, failedItems);
context.fail(JSON.stringify(error));
} else {
console.log('Success: ' + JSON.stringify(success));
context.succeed('Success');
}
});
});
};
function transform(payload) {
if (payload.messageType === 'CONTROL_MESSAGE') {
return null;
}
var bulkRequestBody = '';
payload.logEvents.forEach(function(logEvent) {
var timestamp = new Date(1 * logEvent.timestamp);
// index name format: atg-YYYY.MM.DD
var indexName = [
'atg-' + timestamp.getUTCFullYear(), // year
('0' + (timestamp.getUTCMonth() + 1)).slice(-2), // month
('0' + timestamp.getUTCDate()).slice(-2) // day
].join('.');
var source = buildSource(logEvent.message, logEvent.extractedFields);
var DXLRegex = /\@DXLORDER\@/;
// var LogDatetimeRegex = /(info|Error|Warning|debug)\t(Mon\s|Sun\s|Sat\s|Fri\s|Thu\s|Wed\s|Tue\s)(.*)\t([0-9])/;
var logtyperegex = /(info|Error|Warning|debug)/;
var WOrderRegex = /\|((W)[0-9][0-9][0-9][0-9][0-9][0-9][0-9])\|/;
var DXLOrderRegex = /(DXL([0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]))\|/;
var CustEMailRegex = /(DXL([0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]))\|\s*((.*)@(.*)(\.com|\.net|\.live|\.gov)\s*)\|/;
var EMailReferralRegex = /(@DXLORDER@)\|(SFMC),(\s*)(.*?)(\s*)\|(W[0-9]|\|[0-9])/;
var CouponRegex = /(DXL([0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]|[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9])\|(((.*)@(.*(.com|.net|.live|)))\|)(.*)\|)/;
var atgpathregex = /(\/)(.*)(\t)/;
var carttotalregex = /(W[0-9][0-9][0-9][0-9][0-9][0-9][0-9]|null)\|(.*)\|([0-9]|[0-9][0-9]|[0-9][0-9][0-9])\.([0-9]|[0-9][0-9]|[0-9][0-9][0-9])\.([0-9]|[0-9][0-9]|[0-9][0-9][0-9])\.([0-9]|[0-9][0-9]|[0-9][0-9][0-9])\|(DXL)/;
var referralregex = /(@DXLORDER@)\s*\|\s*(.*)\s*,\s*(.*)\s*\|(W[0-9]|\|[0-9])/;
var ClientIPregex = /(([0-9][0-9]|[0-9][0-9][0-9]|[0-9])\.([0-9][0-9]|[0-9][0-9][0-9]|[0-9])\.([0-9][0-9]|[0-9][0-9][0-9]|[0-9])\.([0-9][0-9]|[0-9][0-9][0-9]|[0-9]))\|/;
var ATGIDRegex = /(20[0-9][0-9])(\t)(.*)(\t)(\/)/;
var EmailSegmentRegex = /([a-z]w)([0-9][0-9][0-9][0-9][0-9][0-9])_([0-9][0-9][0-9][0-9])_([A-Z]|[A-Z][A-Z])\|/;
var usernamesucregex = /(findUser: user )(.*)( found.)/;
var incorrectpassregex = /(Attempt to log into account )(.*)(with incorrect password)/;
var nonexistentregex = /(Attempt to log into nonexistent account )(.*)/;
var badcouponregex = /(noCouponFound: No coupon could be found for the claim code:)( )(.*)(.)/;
var ccauthamtregex = /(<R20_CC_AUTH_AMT>)(.*)(<\/R20_CC_AUTH_AMT>)/;
var couponremovedregex = /(in removeCoupon method : Coupon )(.*)( has been expired so removing couon from order )(.*)/;
var giftdollaramtregex = /(<GIFT_DOL>)( )(.*)(<\/GIFT_DOL.)/;
var giftredeemdollarregex = /(<GIFTCERT_REDEEM_DOL>)( )(.*)(<\/GIFTCERT_REDEEM_DOL>)/;
var itemnumregex = /(ITEM_NUM>)(([0-9][0-9][0-9][0-9][0-9] |[A-Z][0-9][0-9][0-9][0-9] )(.*)|HEM)((<\/[A-Z][0-9][0-9]_([0-9]|[0-9][0-9])_ITEM_NUM>))/;
var giftcardpinregex = /(<GIFT_CARD_PIN>)(.*)(<\/GIFT_CARD_PIN>)/;
var linkshareregex = /(@DXLORDER@)\s*\|\s*(linkshare)\s*,\s*(.*)\s*\|(W[0-9]|\|[0-9])/;
var giftcardnumregex = /<GIFT_CARD_NUM>(.*)<\/GIFT_CARD_NUM>/;
var giftcardprocesseddollarregex = /Payment Status in Payment Group,\sso using it.(.*):(.*)/;
var outofstockregex = /(We're sorry,\s*Item :\s*)(.*)\s([A-Z][0-9][0-9][0-9][0-9])(.*)/;
if(outofstockregex.exec(logEvent.message) != null){
var outofstockdesc = outofstockregex.exec(logEvent.message)[2];
var outofstockid = outofstockregex.exec(logEvent.message)[3];
} else{
outofstockdesc = "-";
outofstockid = "-";
}
if(giftcardprocesseddollarregex.exec(logEvent.message) != null){
var giftcardprocesseddollar = Number(giftcardprocesseddollarregex.exec(logEvent.message)[2]);
} else{
giftcardprocesseddollar = 0;
}
if(giftcardnumregex.exec(logEvent.message) != null){
var giftcardnum = giftcardnumregex.exec(logEvent.message)[1];
} else{
giftcardnum = "-";
}
// if(LogDatetimeRegex.exec(logEvent.message) != null){
// var datetimelog = Date(LogDatetimeRegex.exec(logEvent.message)[3]);
// } else {
// datetimelog = "-";
// }
if(DXLRegex.exec(logEvent.message) != null){
var DXLORDER = "DXLORDER";
} else {
DXLORDER = "False";
}
if(giftcardpinregex.exec(logEvent.message) != null){
var giftcardpin = giftcardpinregex.exec(logEvent.message)[2];
} else{
giftcardpin = "-";
}
if(itemnumregex.exec(logEvent.message) != null ){
var itemnum = itemnumregex.exec(logEvent.message)[3];
var itemdescr = itemnumregex.exec(logEvent.message)[4];
if(itemdescr == null && itemnumregex.exec(logEvent.message)[2] == "HEM"){
itemnum = "-";
itemdescr = "HEM";
}
} else {
itemnum = "-";
itemdescr ="-";
}
if(giftredeemdollarregex.exec(logEvent.message) != null){
var giftredeemdollaramt = giftredeemdollarregex.exec(logEvent.message)[3];
} else{
giftredeemdollaramt = "0";
}
if(giftdollaramtregex.exec(logEvent.message) != null){
var giftdollaramt = giftdollaramtregex.exec(logEvent.message)[3];
} else {
giftdollaramt = "0";
}
if(couponremovedregex.exec(logEvent.message) != null){
var removedcoupon = couponremovedregex.exec(logEvent.message)[2];
} else{
removedcoupon = "N/A";
}
if(couponremovedregex.exec(logEvent.message) != null){
var removedcouponorder = couponremovedregex.exec(logEvent.message)[4];
} else{
removedcouponorder = "N/A";
}
if(ccauthamtregex.exec(logEvent.message) != null){
var ccauthamt = ccauthamtregex.exec(logEvent.message)[2];
} else{
ccauthamt = "0";
}
if(badcouponregex.exec(logEvent.message) != null){
var badcoupon = badcouponregex.exec(logEvent.message)[3];
} else{
badcoupon = "-";
}
if(nonexistentregex.exec(logEvent.message) != null){
var nonexistent = nonexistentregex.exec(logEvent.message)[2];
} else{
nonexistent = "-";
}
if(incorrectpassregex.exec(logEvent.message) != null){
var incorrectpass = incorrectpassregex.exec(logEvent.message)[2];
} else{
incorrectpass = "-";
}
if(usernamesucregex.exec(logEvent.message) != null){
var username = usernamesucregex.exec(logEvent.message)[2];
} else{
username = "-";
}
if (EmailSegmentRegex.exec(logEvent.message)!=null){
var EmailMailing = EmailSegmentRegex.exec(logEvent.message)[1];
var EmailDate = EmailSegmentRegex.exec(logEvent.message)[2];
var EmailTime = EmailSegmentRegex.exec(logEvent.message)[3];
var EmailSegment = EmailSegmentRegex.exec(logEvent.message)[4];
} else{
EmailMailing = "-";
EmailDate = "-";
EmailTime = "-";
EmailSegment = "-";
}
if (ATGIDRegex.exec(logEvent.message)!=null && DXLRegex.exec(logEvent.message) != null){
var ATGID = ATGIDRegex.exec(logEvent.message)[3];
} else{
ATGID = "-";
}
if (ClientIPregex.exec(logEvent.message)!= null && DXLRegex.exec(logEvent.message) != null){
var ClientIP = ClientIPregex.exec(logEvent.message)[1];
} else{
ClientIP = "-";
}
if (referralregex.exec(logEvent.message) != null){
var referralsource = referralregex.exec(logEvent.message)[2];
var referral = referralregex.exec(logEvent.message)[3];
if (referralregex.exec(logEvent.message)[2].includes("linkshare") == true) {
referralsource = linkshareregex.exec(logEvent.message)[2];
referral = linkshareregex.exec(logEvent.message)[3];
}
} else{
referralsource = "-";
referral = "-";
}
if (carttotalregex.exec(logEvent.message) != null && DXLRegex.exec(logEvent.message) != null){
var carttotal = Number(carttotalregex.exec(logEvent.message)[2]);
} else {
carttotal = 0;
}
if (atgpathregex.exec(logEvent.message) != null && DXLRegex.exec(logEvent.message) != null){
var atgpath = atgpathregex.exec(logEvent.message)[0];
} else {
atgpath = "-";
}
if (CouponRegex.exec(logEvent.message) != null && DXLRegex.exec(logEvent.message) != null){
var Coupon = CouponRegex.exec(logEvent.message)[8];
var Coupon1 = Coupon.toUpperCase();
if(CouponRegex.exec(logEvent.message)[8] == "null"){
Coupon = "-";
}
} else {
Coupon = "-";
}
if (logtyperegex.exec(logEvent.message) != null){
var logtype = logtyperegex.exec(logEvent.message)[0];
} else{
logtype = "-";
}
if (EMailReferralRegex.exec(logEvent.message)!= null){
var EmailReferral = EMailReferralRegex.exec(logEvent.message)[4];
} else{
EmailReferral = "-";
}
if (CustEMailRegex.exec(logEvent.message)!= null && DXLRegex.exec(logEvent.message) != null){
var CustEMail = CustEMailRegex.exec(logEvent.message)[3];
} else{
CustEMail = "-";
}
if(DXLOrderRegex.exec(logEvent.message) != null && DXLRegex.exec(logEvent.message) != null){
var ATGOrderID = DXLOrderRegex.exec(logEvent.message)[1];
} else {
ATGOrderID = "-";
}
if(WOrderRegex.exec(logEvent.message)!= null && DXLRegex.exec(logEvent.message) != null){
var WOrder = WOrderRegex.exec(logEvent.message)[1];
} else{
WOrder = "-";
}
source['outofstockdesc'] = outofstockdesc;
source['outofstockid'] = outofstockid;
source['giftcardprocesseddollar'] = Number(1*giftcardprocesseddollar);
source['giftcardnum'] = giftcardnum;
source['DXLORDER'] = DXLORDER;
source['giftcardpin'] = giftcardpin;
source['itemnum'] = itemnum ;
source['itemdescr'] = itemdescr;
source['GiftcertRedeemDol'] = "$"+giftredeemdollaramt;
source['giftdollaramt']=giftdollaramt;
source['removedcoupon'] = removedcoupon;
source['removedcouponorder'] = removedcouponorder;
source['ccauthamt'] = ccauthamt;
source['badcoupon'] = badcoupon;
source['nonexistent'] = nonexistent;
source['incorrectpass'] = incorrectpass;
source['username'] = username;
source['EmailMailing'] = EmailMailing;
source['EmailDate']=EmailDate;
source['EmailTime'] = EmailTime;
source['EmailSegment'] = EmailSegment;
source['ATGID'] = ATGID;
source['ClientIP'] = ClientIP;
source['referral'] = referral;
source['referralsource'] = referralsource;
//source['carttotal'] = Number(1* carttotal);
source['carttotaldollar'] = Number(1* carttotal);
source['atgpath'] = atgpath;
source['Coupon'] = Coupon1;
source['logtype'] = logtype;
source['EMailReferral'] = EmailReferral;
source['CustomerEMail'] = CustEMail;
source['ATGOrderID'] = ATGOrderID;
source['WOrder'] = WOrder;
// source['datetimelog'] = new Date(datetimelog).toISOString();
source['@id'] = logEvent.id;
source['@timestamp'] = new Date(1 * logEvent.timestamp).toISOString();
source['@message'] = logEvent.message;
source['@owner'] = payload.owner;
source['@log_group'] = payload.logGroup;
source['@log_stream'] = payload.logStream;
var action = { "index": {} };
action.index._index = indexName;
action.index._type = payload.logGroup;
action.index._id = logEvent.id;
bulkRequestBody += [
JSON.stringify(action),
JSON.stringify(source),
].join('\n') + '\n';
});
return bulkRequestBody;
}
function buildSource(message, extractedFields) {
if (extractedFields) {
var source = {};
for (var key in extractedFields) {
if (extractedFields.hasOwnProperty(key) && extractedFields[key]) {
var value = extractedFields[key];
if (isNumeric(value)) {
source[key] = 1 * value;
continue;
}
jsonSubString = extractJson(value);
if (jsonSubString !== null) {
source['$' + key] = JSON.parse(jsonSubString);
}
source[key] = value;
}
}
return source;
}
jsonSubString = extractJson(message);
if (jsonSubString !== null) {
return JSON.parse(jsonSubString);
}
return {};
}
function extractJson(message) {
var jsonStart = message.indexOf('{');
if (jsonStart < 0) return null;
var jsonSubString = message.substring(jsonStart);
return isValidJson(jsonSubString) ? jsonSubString : null;
}
function isValidJson(message) {
try {
JSON.parse(message);
} catch (e) { return false; }
return true;
}
function isNumeric(n) {
return !isNaN(parseFloat(n)) && isFinite(n);
}
function post(body, callback) {
var requestParams = buildRequest(endpoint, body);
var request = https.request(requestParams, function(response) {
var responseBody = '';
response.on('data', function(chunk) {
responseBody += chunk;
});
response.on('end', function() {
var info = JSON.parse(responseBody);
var failedItems;
var success;
if (response.statusCode >= 200 && response.statusCode < 299) {
failedItems = info.items.filter(function(x) {
return x.index.status >= 300;
});
success = {
"attemptedItems": info.items.length,
"successfulItems": info.items.length - failedItems.length,
"failedItems": failedItems.length
};
}
var error = response.statusCode !== 200 || info.errors === true ? {
"statusCode": response.statusCode,
"responseBody": responseBody
} : null;
callback(error, success, response.statusCode, failedItems);
});
}).on('error', function(e) {
callback(e);
});
request.end(requestParams.body);
}
function buildRequest(endpoint, body) {
var endpointParts = endpoint.match(/^([^\.]+)\.?([^\.]*)\.?([^\.]*)\.amazonaws\.com$/);
var region = endpointParts[2];
var service = endpointParts[3];
var datetime = (new Date()).toISOString().replace(/[:\-]|\.\d{3}/g, '');
var date = datetime.substr(0, 8);
var kDate = hmac('AWS4' + process.env.AWS_SECRET_ACCESS_KEY, date);
var kRegion = hmac(kDate, region);
var kService = hmac(kRegion, service);
var kSigning = hmac(kService, 'aws4_request');
var request = {
host: endpoint,
method: 'POST',
path: '/_bulk',
body: body,
headers: {
'Content-Type': 'application/json',
'Host': endpoint,
'Content-Length': Buffer.byteLength(body),
'X-Amz-Security-Token': process.env.AWS_SESSION_TOKEN,
'X-Amz-Date': datetime
}
};
var canonicalHeaders = Object.keys(request.headers)
.sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? -1 : 1; })
.map(function(k) { return k.toLowerCase() + ':' + request.headers[k]; })
.join('\n');
var signedHeaders = Object.keys(request.headers)
.map(function(k) { return k.toLowerCase(); })
.sort()
.join(';');
var canonicalString = [
request.method,
request.path, '',
canonicalHeaders, '',
signedHeaders,
hash(request.body, 'hex'),
].join('\n');
var credentialString = [ date, region, service, 'aws4_request' ].join('/');
var stringToSign = [
'AWS4-HMAC-SHA256',
datetime,
credentialString,
hash(canonicalString, 'hex')
] .join('\n');
request.headers.Authorization = [
'AWS4-HMAC-SHA256 Credential=' + process.env.AWS_ACCESS_KEY_ID + '/' + credentialString,
'SignedHeaders=' + signedHeaders,
'Signature=' + hmac(kSigning, stringToSign, 'hex')
].join(', ');
return request;
}
function hmac(key, str, encoding) {
return crypto.createHmac('sha256', key).update(str, 'utf8').digest(encoding);
}
function hash(str, encoding) {
return crypto.createHash('sha256').update(str, 'utf8').digest(encoding);
}
function logFailure(error, failedItems) {
if (logFailedResponses) {
console.log('Error: ' + JSON.stringify(error, null, 2));
if (failedItems && failedItems.length > 0) {
console.log("Failed Items: " +
JSON.stringify(failedItems, null, 2));
}
}
}
I don't see a context.fail() function in the AWS docs.
The callback() function takes two arguments: error and response.
Have you tried changing context to callback, like this?
if (error) {
logFailure(error, failedItems);
callback(error);
} else {
console.log('Success: ' + JSON.stringify(success));
callback(null, 'Success');
}
See Related

Image::GetFrameCount sets Image::LastError to win32error c++ gdi+

This function changes the active frame of an image after some time, but when I try to get the frame count of the dimension, it sets image->lastError to Win32Error.
Is there any way to fix it?
(Windows 10, Visual Studio 2017 Community)
void PlayImageAnim(Gdiplus::Image*&image, int delay,bool Looped)
{
using namespace Gdiplus;
if (Looped == true)
{
while (true)
{
UINT dcount = 0;
GUID *dimensionsIDs;
dcount = image->GetFrameDimensionsCount();
dimensionsIDs = new GUID[dcount];
UINT frame_count = image->GetFrameCount(&dimensionsIDs[0]);
int y = 0;
GUID pageGUID = FrameDimensionTime;
int size = image->GetPropertyItemSize(PropertyTagFrameDelay);
Gdiplus::PropertyItem*pr_item = (Gdiplus::PropertyItem*)malloc(size);
for (UINT i = 0; i <= frame_count; i++)
{
/*graphics.DrawImage(image, image_rect);*/
/*long delay = ((long*)pr_item->value)[i] * 10;*/
image->SelectActiveFrame(&pageGUID, i);
std::this_thread::sleep_for(std::chrono::milliseconds(delay));
}
image->SelectActiveFrame(&pageGUID, 0);
}
}
else
{
UINT dcount = 0;
GUID *dimensionsIDs;
dcount = image->GetFrameDimensionsCount();
dimensionsIDs = new GUID[dcount];
UINT frame_count = image->GetFrameCount(&dimensionsIDs[0]);
GUID pageGUID = FrameDimensionTime;
int size = image->GetPropertyItemSize(PropertyTagFrameDelay);
Gdiplus::PropertyItem*pr_item = (Gdiplus::PropertyItem*)malloc(size);
for (UINT i = 0; i <= frame_count; i++)
{
image->SelectActiveFrame(&pageGUID, i);
std::this_thread::sleep_for(std::chrono::milliseconds(delay));
}
image->SelectActiveFrame(&pageGUID, 0);
}
}
It has two similar parts used in other parts of the project.

Writing directly to filesystem

I want to write directly to the file system rather than appending every line to a string first, because that is much more time consuming.
I tried it like this:
static void Main(string[] args)
{
string path = @"C:\BankNumber";
var bans = BankAcoutNumbers.BANS;
const int MAX_FILES = 80;
const int BANS_PER_FILE = 8181 / 80;
int bansCounter = 0;
var part = new List<int>();
var maxNumberOfFiles = 10;
Stopwatch timer = new Stopwatch();
var fileCounter = 0;
if (!Directory.Exists(path))
{
DirectoryInfo di = Directory.CreateDirectory(path);
}
try
{
while (fileCounter <= maxNumberOfFiles)
{
timer.Start();
Console.WriteLine("Start writing the bank numbers to file system");
// Console.WriteLine(timer.Start());
foreach (var bank in BankAcoutNumbers.BANS)
{
part.Add(bank);
if (++bansCounter >= BANS_PER_FILE)
{
string fileName = string.Format("{0}.txt-{1}.txt", part[0], part[part.Count - 1]);
string outputToFile = "";// Otherwise you don't see the lines in the file, just a single line!!
string subString = System.IO.Path.Combine(path, "nr");// Needed, because otherwise the files will not be stored in the correct folder!!
fileName = subString + fileName;
foreach (var partBan in part)
{
using(StreamWriter st = new StreamWriter(fileName))
{
System.IO.File.WriteAllText(fileName, outputToFile);
}
//Console.WriteLine(partBan);
// outputToFile += partBan + Environment.NewLine;//Writing the lines to the file
}
;//Writes to file system.
part.Clear();
bansCounter = 0;
//System.IO.File.WriteAllText(fileName, part.ToString());
if (++fileCounter >= MAX_FILES)
break;
}
}
}
timer.Stop();
Console.WriteLine("Total time of writing the bank numbers to file system " + timer.Elapsed.Seconds + " seconds");
//Console.WriteLine(BankAcoutNumbers.BANS.Count());
}
catch (Exception)
{
throw;
}
System.Console.WriteLine("Press any key to exit.");
System.Console.ReadKey();
}
Then I tried it like this:
foreach (var partBan in part)
{
using(StreamWriter st = new StreamWriter(fileName))
{
System.IO.File.WriteAllText(fileName, outputToFile);
}
//Console.WriteLine(partBan);
// outputToFile += partBan + Environment.NewLine;//Writing the lines to the file
}
Thank you
Then I tried it like this:
foreach (var partBan in part)
{
using(StreamWriter st = new StreamWriter(fileName))
{
//System.IO.File.WriteAllText(subString, fileName);
st.WriteLine(outputToFile);
}
//Console.WriteLine(partBan);
// outputToFile += partBan + Environment.NewLine;//Writing the lines to the file
}
But I don't see the content of the files; the files are empty.
Finally I tried it like this:
public class Program
{
static void Main(string[] args)
{
string path = @"C:\BankNumber";
var bans = BankAcoutNumbers.BANS;
const int MAX_FILES = 80;
const int BANS_PER_FILE = 81818182 / 80;
int bansCounter = 0;
var part = new List<int>();
var maxNumberOfFiles = 10;
Stopwatch timer = new Stopwatch();
var fileCounter = 0;
if (!Directory.Exists(path))
{
DirectoryInfo di = Directory.CreateDirectory(path);
}
try
{
while (fileCounter <= maxNumberOfFiles)
{
timer.Start();
Console.WriteLine("Start writing the bank numbers to file system");
// Console.WriteLine(timer.Start());
foreach (var bank in BankAcoutNumbers.BANS)
{
part.Add(bank);
if (++bansCounter >= BANS_PER_FILE)
{
string fileName = string.Format("{0}.txt-{1}.txt", part[0], part[part.Count - 1]);
//string outputToFile = "";// Otherwise you don't see the lines in the file, just a single line!!
StringBuilder OutputFile = new StringBuilder("");
string subString = System.IO.Path.Combine(path, "nr");// Needed, because otherwise the files will not be stored in the correct folder!!
fileName = subString + fileName;
foreach (var partBan in part)
{
OutputFile.Append(string.Format("{0}{1}", partBan.ToString(), Environment.NewLine));
//using(StreamWriter st = new StreamWriter(fileName))
//{
// //System.IO.File.WriteAllText(subString, fileName);
// StringBuilder strBuilder = new StringBuilder();
// strBuilder.Append(st.)
// st.Write(partBan + Environment.NewLine);
//}
//Console.WriteLine(partBan);
// outputToFile += partBan + Environment.NewLine;//Writing the lines to the file
}
//;//Writes to file system.
System.IO.File.WriteAllText(fileName, OutputFile.ToString());
part.Clear();
bansCounter = 0;
//System.IO.File.WriteAllText(fileName, part.ToString());
if (++fileCounter >= MAX_FILES)
break;
}
}
}
timer.Stop();
Console.WriteLine("Total time of writing the bank numbers to file system " + timer.Elapsed.TotalSeconds + " seconds");
//Console.WriteLine(BankAcoutNumbers.BANS.Count());
}
catch (Exception)
{
throw;
}
foreach (var item in part)
{
ThreadPool.QueueUserWorkItem(DoLongTask);
}
Console.WriteLine("Main thread ends");
System.Console.WriteLine("Press any key to exit.");
System.Console.ReadKey();
}
public static void DoLongTask(object input)
{
Console.WriteLine("Thread is background : {0}", Thread.CurrentThread.IsBackground);
Console.WriteLine("Input parameter : {0}", input);
}
}
WriteAllText is not slower than StreamWriter.
From the .NET source code, here is how WriteAllText works:
private static void InternalWriteAllText(string path,
string contents, Encoding encoding)
{
Contract.Requires(path != null);
Contract.Requires(encoding != null);
Contract.Requires(path.Length > 0);
using (StreamWriter sw = new StreamWriter(path, false, encoding))
sw.Write(contents);
}
The slowness is caused by string concatenation. If you substitute the string with a StringBuilder, performance will improve. Check this code:
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
namespace ConsoleApplication1
{
class BankAcoutNumbers
{
public List<int> BANS { get; set; }
public BankAcoutNumbers()
{
BANS = new List<int>();
BANS.Add(1456456465);
BANS.Add(2456465);
BANS.Add(342346465);
BANS.Add(445645646);
BANS.Add(545636546);
BANS.Add(64556465);
BANS.Add(7456465);
BANS.Add(842346465);
BANS.Add(9456456);
BANS.Add(10456365);
BANS.Add(11456456);
BANS.Add(12456465);
BANS.Add(1342346);
BANS.Add(1445645);
BANS.Add(1545636);
BANS.Add(1645645);
BANS.Add(1745646);
BANS.Add(1842345);
BANS.Add(194564);
BANS.Add(2045635);
BANS.Add(214564);
BANS.Add(224564);
BANS.Add(234234);
BANS.Add(244564);
BANS.Add(254563);
}
}
class Program
{
static void Main(string[] args)
{
string path = @"C:\";
//string fileName = string.Format("{0}{1}-", part[0], part[part.Count - 1]);
//var bans = BankAcoutNumbers.BANS;
const int MAX_FILES = 10;
const int BANS_PER_FILE = 10;
int bansCounter = 0;
var part = new List<int>();
//string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
var maxNumberOfFiles = 10;
Stopwatch timer = new Stopwatch();
var fileCounter = 0;
if (!Directory.Exists(path))
{
DirectoryInfo di = Directory.CreateDirectory(path);
}
//var destiNationFile = new StreamWriter(string.Format(fileName, fileCounter + 1));
try
{
// foreach (var bank in BankAcoutNumbers.BANS.Take(100))
//{
while (fileCounter <= maxNumberOfFiles)
{
timer.Start();
foreach (var bank in new BankAcoutNumbers().BANS)
{
part.Add(bank);
if (++bansCounter >= BANS_PER_FILE)
{
string fileName = string.Format("{0}-{1}", part[0], part[part.Count - 1]);
StringBuilder outputToFile = new StringBuilder("");
//var destinationFile = new StreamWriter(fileName);
//destiNationFile = new StreamWriter(fileName);
Console.WriteLine("NR{0}", fileName);
fileName = @"C:\" + fileName;
foreach (var partBan in part)
{
outputToFile.Append(string.Format("{0}{1}", partBan , Environment.NewLine));
Console.WriteLine(partBan);
}
System.IO.File.WriteAllText(fileName, outputToFile.ToString());
part.Clear();
bansCounter = 0;
if (++fileCounter >= MAX_FILES)
break;
}
}
}
timer.Stop();
Console.WriteLine(timer.Elapsed.Seconds);
}
catch (Exception)
{
throw;
}
// Keep the console window open in debug mode.
System.Console.WriteLine("Press any key to exit.");
System.Console.ReadKey();
}
}
}
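An alternative sketch (not part of the answer above; it assumes the same BankAcoutNumbers demo class in the same namespace and an illustrative target folder) skips the manual string building entirely and lets File.WriteAllLines write one line per number:
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace ConsoleApplication1
{
    static class WriteAllLinesSketch
    {
        // Writes the BANS list in chunks of 'bansPerFile' numbers, one file per chunk.
        public static void WriteInChunks(string path, int bansPerFile)
        {
            List<int> bans = new BankAcoutNumbers().BANS;
            for (int start = 0; start < bans.Count; start += bansPerFile)
            {
                var part = bans.Skip(start).Take(bansPerFile).ToList();
                string fileName = Path.Combine(path,
                    string.Format("{0}-{1}", part[0], part[part.Count - 1]));
                // WriteAllLines formats each number and writes one line per element in a single call.
                File.WriteAllLines(fileName, part.Select(n => n.ToString()));
            }
        }
    }
}
Called as WriteAllLinesSketch.WriteInChunks(@"C:\", 10), it produces the same kind of files; internally WriteAllLines still opens a single StreamWriter per file, so performance is comparable to the StringBuilder version, it mainly saves the intermediate buffer.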

DomainCollectionView not changing pages

I'm trying to download a large amount of items from the server, and to avoid timeouts I'm using paging. What I tried to do is load the first page and, in my DomainCollectionView Loaded method, call MoveToNextPage, and so on until all pages are loaded.
When I do that, the DCV does not change page; it remains on page 0. Then I tried to refresh inside the PageChanged event of the DCV, but there the PageIndex is -1, and when I refresh, the load operation is automatically cancelled.
this is my code:
private void OnLoadedPrintingModels(LoadOperation<AppointmentModel> op)
{
if (op.HasError)
{
MessageBox.Show(ApplicationStrings.PrintingConnectionError + Environment.NewLine + op.Error.Message);
op.MarkErrorAsHandled();
this.downloadDialog.Close();
this.TotalPrintCountD = 100.0;
this.downloadDialog = null;
this.printingList.Clear();
this.printingDomainList.Source = null;
}
else if (!op.IsCanceled)
{
if (op.Entities.Any())
{
printingList.AddRange(op.Entities);
if (this.isStartingPrinting)
{
this.TotalPrintCount = op.TotalEntityCount;
this.TotalPrintCountD = (double)op.TotalEntityCount;
this.isStartingPrinting = false;
}
isDownloaded = true;
printingDomainList.Source = op.Entities;
printingDomainView.SetTotalItemCount(op.TotalEntityCount);
}
else
{
MessageBox.Show(ApplicationStrings.NoSearchResults);
isStartingPrinting = true;
this.downloadDialog.Close();
this.TotalPrintCountD = 100.0;
this.downloadDialog = null;
this.printingList.Clear();
this.printingDomainList.Source = null;
DownloadedPrintingItems = 0.0;
}
}
else
{
MessageBox.Show(ApplicationStrings.PrintOperationCanceledByUser);
this.downloadPrintDataCanceled = false;
this.downloadDialog.Close();
this.TotalPrintCountD = 100.0;
this.downloadDialog = null;
this.printingList.Clear();
this.printingDomainList.Source = null;
isStartingPrinting = true;
DownloadedPrintingItems = 0.0;
}
}
bool isDownloaded = false;
void printingDomainView_PageChanged(object sender, EventArgs e)
{
if (isDownloaded)
{
isDownloaded = false;
DownloadedPrintingItems = 100.0 * (double)printingList.Count / this.TotalPrintCountD;
if (printingList.Count < this.TotalPrintCount)
{
using (this.printingDomainView.DeferRefresh())
{
this.printingDomainView.PageSize = PrintingPageSize;
this.printingDomainView.MoveToNextPage();
}
}
else
{
isStartingPrinting = true;
this.downloadDialog.Close();
this.downloadDialog = null;
DownloadedPrintingItems = 0.0;
ConfirmPrintingResults();
}
}
}
I'm trying many things here, but nothing seems to work. Any help on how to do this would be very much appreciated.
Thank you very much in advance.
I finally solved the problem by waiting for DomainCollectionView.IsPageChanging inside a background task using async and await; the resulting code is this:
private async void OnLoadedPrintingModels(LoadOperation<AppointmentModel> op)
{
if (op.HasError)
{
MessageBox.Show(ApplicationStrings.PrintingConnectionError + Environment.NewLine + op.Error.Message);
op.MarkErrorAsHandled();
this.downloadDialog.Close();
this.TotalPrintCountD = 100.0;
this.downloadDialog = null;
this.printingList.Clear();
this.printingDomainList.Source = null;
}
else if (!op.IsCanceled)
{
if (op.Entities.Any())
{
printingList.AddRange(op.Entities);
if (this.isStartingPrinting)
{
this.TotalPrintCount = op.TotalEntityCount;
this.TotalPrintCountD = (double)op.TotalEntityCount;
this.isStartingPrinting = false;
}
printingDomainList.Source = op.Entities;
printingDomainView.SetTotalItemCount(op.TotalEntityCount);
await Task.Factory.StartNew(() =>
{
while (printingDomainView.IsPageChanging)
{
Thread.Sleep(1);
}
});
DownloadedPrintingItems = 100.0 * (double)printingList.Count / this.TotalPrintCountD;
if (printingList.Count < this.TotalPrintCount)
{
using (this.printingDomainView.DeferRefresh())
{
this.printingDomainView.PageSize = PrintingPageSize;
this.printingDomainView.MoveToNextPage();
}
}
else
{
//this.IsBusy = false;
//this.WaitingMessage = ApplicationStrings.WaitLabel;
isStartingPrinting = true;
this.downloadDialog.Close();
//this.TotalPrintCountD = 100.0;
this.downloadDialog = null;
DownloadedPrintingItems = 0.0;
//Thread.CurrentThread.Join(1);
ConfirmPrintingResults();
}
}
else
{
MessageBox.Show(ApplicationStrings.NoSearchResults);
isStartingPrinting = true;
this.downloadDialog.Close();
this.TotalPrintCountD = 100.0;
this.downloadDialog = null;
this.printingList.Clear();
this.printingDomainList.Source = null;
DownloadedPrintingItems = 0.0;
}
}
else
{
MessageBox.Show(ApplicationStrings.PrintOperationCanceledByUser);
this.downloadPrintDataCanceled = false;
this.downloadDialog.Close();
this.TotalPrintCountD = 100.0;
this.downloadDialog = null;
this.printingList.Clear();
this.printingDomainList.Source = null;
isStartingPrinting = true;
DownloadedPrintingItems = 0.0;
}
}
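A lighter-weight variant of that wait, just as a sketch: instead of blocking a thread-pool thread with Thread.Sleep inside Task.Factory.StartNew, the condition can be polled asynchronously. The helper below is hypothetical (and note that on Silverlight 5 the async targeting pack exposes the delay as TaskEx.Delay rather than Task.Delay):
using System;
using System.Threading.Tasks;

static class PagingWaiter
{
    // Polls a condition asynchronously; no worker thread is blocked between checks.
    public static async Task WaitUntilAsync(Func<bool> condition, int pollMilliseconds = 10)
    {
        while (!condition())
        {
            await Task.Delay(pollMilliseconds); // use TaskEx.Delay on Silverlight 5
        }
    }
}
In OnLoadedPrintingModels it would replace the Task.Factory.StartNew block with something like: await PagingWaiter.WaitUntilAsync(() => !printingDomainView.IsPageChanging);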