Skip to content

Commit 01f0c0a

Browse files
committed
Add try catch around processing
1 parent 12e71fc commit 01f0c0a

1 file changed

Lines changed: 88 additions & 71 deletions

File tree

OculusDB/ScrapingMaster/ScrapingManaging.cs

Lines changed: 88 additions & 71 deletions
Original file line numberDiff line numberDiff line change
@@ -90,84 +90,101 @@ public static List<ScrapingTask> ConvertAppsToScrapeToScrapingTasks(List<AppToSc
9090

9191
/// <summary>
/// Processes the result a scraping node submitted for its task and returns a summary
/// (<see cref="ScrapingProcessedResult"/>) describing what was accepted or rejected.
/// </summary>
/// <param name="taskResult">The payload the node sent back (result type, apps to scrape, scraped data).</param>
/// <param name="scrapingNodeAuthenticationResult">Authentication info identifying the submitting node (id, currency).</param>
/// <returns>A result object with <c>processed</c>, <c>msg</c> and count fields filled in for the node.</returns>
public static ScrapingProcessedResult ProcessTaskResult(ScrapingNodeTaskResult taskResult, ScrapingNodeAuthenticationResult scrapingNodeAuthenticationResult)
{
    string scrapingNodeId = scrapingNodeAuthenticationResult.scrapingNode.scrapingNodeId;
    // Ensure a per-node processing tracker exists, then mark processing as started.
    // TryAdd avoids the ContainsKey + Add double lookup.
    processingRn.TryAdd(scrapingNodeId, new ScrapingNodeTaskResultProcessing());
    processingRn[scrapingNodeId].Start();

    ScrapingProcessedResult r = new ScrapingProcessedResult();
    // Everything below is wrapped so an unexpected failure still reaches Done() and
    // the node gets reported instead of leaving the tracker stuck in "started".
    try
    {
        string currency = scrapingNodeAuthenticationResult.scrapingNode.currency;
        // Lazily create the per-currency "app adding" lock state.
        // NOTE(review): previous code re-read scrapingNode.currency here; use the local for consistency.
        if (!isAppAddingRunning.ContainsKey(currency))
        {
            isAppAddingRunning.Add(currency, new());
        }

        Logger.Log("Results of Scraping node " + scrapingNodeAuthenticationResult.scrapingNode +
                   " received. Processing now...");
        Logger.Log("Result type: " +
                   Enum.GetName(typeof(ScrapingNodeTaskResultType), taskResult.scrapingNodeTaskResultType));
        // Process results of scraping:
        // - When apps for scraping have been sent, add them to the DB for scraping
        // - On error while requesting apps to scrape, make other scraping nodes able to request apps to scrape
        // - When scraping is done, compute the activity entries and write both to the DB (Each scraped app should)
        switch (taskResult.scrapingNodeTaskResultType)
        {
            case ScrapingNodeTaskResultType.Unknown:
                r.processed = false;
                r.msg = "Cannot process unknown task result type.";
                break;
            case ScrapingNodeTaskResultType.ErrorWhileRequestingAppsToScrape:
                // Only the node currently holding the app-adding responsibility may report this error.
                if (!isAppAddingRunning[currency].IsThisResponsible(scrapingNodeAuthenticationResult.scrapingNode))
                {
                    Logger.Log("Node is not responsible for adding apps to scrape. Ignoring error.");
                    r.processed = false;
                    r.msg = "You are not responsible for adding apps to scrape. Your submission has been ignored.";
                    r.failedCount = 1;
                    break;
                }

                Logger.Log(
                    "Error while requesting apps to scrape. Making other scraping nodes able to request apps to scrape.");
                // Release the responsibility for 10 minutes so another node can take over.
                isAppAddingRunning[currency].Set(false, TimeSpan.FromMinutes(10), "");
                break;
            case ScrapingNodeTaskResultType.FoundAppsToScrape:
                if (!isAppAddingRunning[currency].IsThisResponsible(scrapingNodeAuthenticationResult.scrapingNode))
                {
                    Logger.Log("Node is not responsible for adding apps to scrape. Ignoring.");
                    r.processed = false;
                    r.msg = "You are not responsible for adding apps to scrape. Your submission has been ignored.";
                    r.failedCount = 1;
                    break;
                }

                if (taskResult.appsToScrape.Count > 0)
                {
                    Logger.Log("Found apps to scrape. Adding them to the DB.");
                }
                else
                {
                    Logger.Log("No new apps present in task result. Adding existing");
                }

                ScrapingNodeMongoDBManager.AddAppsToScrape(taskResult.appsToScrape,
                    scrapingNodeAuthenticationResult.scrapingNode);

                // Hand back the app-adding lock now that the apps were stored.
                isAppAddingRunning[currency].Unlock(scrapingNodeAuthenticationResult.scrapingNode);
                r.processed = true;
                r.processedCount = taskResult.appsToScrape.Count;
                r.msg = "Added " + taskResult.appsToScrape.Count + " apps to scrape. Thanks for the cooperation.";
                break;
            case ScrapingNodeTaskResultType.AppsScraped:
                // Inner try/catch: a bad scrape payload from one node must not abort result handling;
                // the error is logged and reported to Discord instead.
                try
                {
                    ProcessScrapedResults(taskResult, scrapingNodeAuthenticationResult, ref r);
                    r.msg = "Processed " + taskResult.scraped.applications.Count + " applications, " +
                            taskResult.scraped.dlcs.Count + " dlcs, " + taskResult.scraped.dlcPacks.Count +
                            " dlc packs, " + taskResult.scraped.versions.Count + " version and " +
                            taskResult.scraped.imgs.Count + " images from scraping node " +
                            scrapingNodeAuthenticationResult.scrapingNode + ". Thanks for your contribution.";
                }
                catch (Exception e)
                {
                    Logger.Log(
                        "Error while processing scraped results of node " +
                        scrapingNodeAuthenticationResult.scrapingNode + ": " + e, LoggingType.Warning);
                    ReportErrorWithDiscordMessage(e.ToString());
                }
                break;
        }
    }
    catch (Exception e)
    {
        // Catch-all so the tracker below is always marked Done and the failure is surfaced.
        ReportErrorWithDiscordMessage("Error processing Task result", e.ToString());
    }

    processingRn[scrapingNodeId].Done();
    return r;
}
173190

0 commit comments

Comments
 (0)