diff --git a/pom.xml b/pom.xml
index b99ea69..ebddca7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -13,6 +13,11 @@
 				<version>4.11</version>
 				<scope>test</scope>
 			</dependency>
+			<dependency>
+				<groupId>org.apache.commons</groupId>
+				<artifactId>commons-math3</artifactId>
+				<version>3.2</version>
+			</dependency>
 			<dependency>
 				<groupId>net.sf.trove4j</groupId>
 				<artifactId>trove4j</artifactId>
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/CombineFiles.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/CombineFiles.java
new file mode 100644
index 0000000..cb44886
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/CombineFiles.java
@@ -0,0 +1,197 @@
+/**
+ * Combine approach 2 by Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.combineazuredataset;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+public class CombineFiles {
+	//Number of lines to combine; each one represents a unique function
+	int numLines = 400;
+	Path path = null;
+	Path dest = null;
+	int recordCounter = 0;
+	//Header of the combined file: the hash and trigger columns, the 1440
+	//per-minute invocation counts, then the execution time and memory statistics
+	String[] header = buildHeader();
+
+	private static String[] buildHeader() {
+		ArrayList<String> h = new ArrayList<String>(1465);
+		h.addAll(Arrays.asList("HashOwner", "HashApp", "HashFunction", "Trigger"));
+		for (int i = 1; i <= 1440; i++) {
+			h.add(String.valueOf(i));
+		}
+		h.addAll(Arrays.asList("Average", "Count", "Minimum", "Maximum",
+				"percentile_Average_0", "percentile_Average_1", "percentile_Average_25",
+				"percentile_Average_50", "percentile_Average_75", "percentile_Average_99",
+				"percentile_Average_100", "SampleCount", "AverageAllocatedMb",
+				"AverageAllocatedMb_pct1", "AverageAllocatedMb_pct5", "AverageAllocatedMb_pct25",
+				"AverageAllocatedMb_pct50", "AverageAllocatedMb_pct75", "AverageAllocatedMb_pct95",
+				"AverageAllocatedMb_pct99", "AverageAllocatedMb_pct100"));
+		return h.toArray(new String[0]);
+	}
+ String fileInvocation;
+ String fileExecution;
+ String fileMemory;
+
+ private final File toBeReadInvocation;
+ private final File toBeReadExecution;
+ private final File toBeReadMemory;
+
+ private BufferedReader ReaderInvocation;
+ private BufferedReader ReaderExecution;
+ private BufferedReader ReaderMemory;
+	/**
+	 * Invocations and executions are keyed by HashFunction, which is unique;
+	 * the memory file has no HashFunction column, so its rows are kept in a
+	 * list and matched via HashOwner and HashApp instead.
+	 */
+	SortedMap<String, String> allInvocations = new TreeMap<String, String>();
+	SortedMap<String, String> allExecutions = new TreeMap<String, String>();
+	ArrayList<String[]> listMemory = new ArrayList<String[]>();
+
+ int mCounter = 0;
+
+ public CombineFiles(String invocationFile, String exeTimeFile, String memoryFile) {
+ fileInvocation = invocationFile;
+ fileExecution = exeTimeFile;
+ fileMemory = memoryFile;
+
+ toBeReadInvocation = new File(fileInvocation);
+ toBeReadExecution = new File(fileExecution);
+ toBeReadMemory = new File(fileMemory);
+ path = Paths.get(fileInvocation);
+ loadFiles();
+ }//end constructor
+
+ private void loadFiles() {
+ long before = System.currentTimeMillis();
+ try {
+ //load invocations
+ String lineInvo = null;
+ ReaderInvocation = new BufferedReader(new FileReader(toBeReadInvocation));
+			ReaderInvocation.readLine();//skip header
+			while((lineInvo = ReaderInvocation.readLine()) != null) {
+				String[] record = lineInvo.trim().split(",");
+				allInvocations.put(record[2], lineInvo); //key: the unique HashFunction
+ }//end while
+
+ //load execution
+ String lineExe = null;
+ ReaderExecution = new BufferedReader(new FileReader(toBeReadExecution));
+			ReaderExecution.readLine();//skip header
+			while((lineExe = ReaderExecution.readLine()) != null ) {
+				String[] executionRowData = lineExe.trim().split(",");
+				allExecutions.put(executionRowData[2], lineExe); //key: the unique HashFunction
+ }//end while
+ //load memory
+ String lineMem = null;
+ ReaderMemory = new BufferedReader(new FileReader(toBeReadMemory));
+			ReaderMemory.readLine();//skip header
+ while((lineMem = ReaderMemory.readLine()) != null) {
+ String[] MemRowData = lineMem.trim().split(",");
+ listMemory.add(MemRowData);
+ }//end while
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ System.out.println("loading took: " + (System.currentTimeMillis() - before)/1000 + "s");
+ }
+
+ public void combine() {
+
+ try {
+
+ RandomAccessFile raf = new RandomAccessFile(path.getParent() +"/"+ "functionsOneDay.csv", "rwd");
+
+			//write the header of the combined file
+			raf.writeBytes(String.join(",", header) + "\n");
+			//iterate over the invocations
+			Set<Map.Entry<String, String>> invoList = allInvocations.entrySet();
+			Iterator<Map.Entry<String, String>> listInvos = invoList.iterator();
+
+			mainLoop: while(listInvos.hasNext() && numLines > 0) { //walk all invocations until enough records were written
+
+				Map.Entry<String, String> invocationRecord = listInvos.next();
+				//iterate over the executions; matched entries are removed below, so this set shrinks as we go
+				Set<Map.Entry<String, String>> ExecList = allExecutions.entrySet();
+				Iterator<Map.Entry<String, String>> listExec = ExecList.iterator();
+
+				while(listExec.hasNext()) {
+					Map.Entry<String, String> executionRecord = listExec.next();
+
+ if(invocationRecord.getKey().equals(executionRecord.getKey())) {
+					mCounter = 0;//reset
+					while(mCounter < listMemory.size()) { //scan the memory rows until a match is found
+						String[] MemRowData = listMemory.get(mCounter);
+						//get the owner and app hashes of the memory row
+						String HashOwnerMem = MemRowData[0];
+						String HashAppMem = MemRowData[1];
+
+						String inovRow = invocationRecord.getValue();
+						String[] invocationRowData = inovRow.trim().split(",");
+
+						String exeRow = executionRecord.getValue();
+						String[] executionRowData = exeRow.trim().split(",");
+						String HashOwnerInvo = invocationRowData[0];//hash owner of the invocation
+						String HashAppInvo = invocationRowData[1];//hash app of the invocation
+
+						if(HashOwnerInvo.equals(HashOwnerMem) && HashAppInvo.equals(HashAppMem)) {
+
+							String[] dataExecution = Arrays.copyOfRange(executionRowData, 3, executionRowData.length); //drop the duplicated hashes
+							String[] dataMemory = Arrays.copyOfRange(MemRowData, 2, MemRowData.length); //drop the duplicated hashes
+
+							//assemble the combined record: the full invocation row, then the
+							//execution and memory rows without their duplicated hash columns
+							String record = String.join(",", invocationRowData) + ","
+									+ String.join(",", dataExecution) + ","
+									+ String.join(",", dataMemory);
+							record = record.replaceAll("\\s+",""); //remove whitespace if present
+
+ raf.writeBytes(record + "\n");
+							recordCounter++;
+							System.out.println("Record: " + recordCounter + " finished");
+ numLines--;
+ allExecutions.remove(executionRecord.getKey());
+							continue mainLoop; //each function has one memory row; once matched, move to the next invocation
+ }//end if
+ mCounter++;
+ }//end while memory
+					}//end if matching function hash
+
+ }//end while execution
+
+ }//end while invocation
+ //Close files
+ ReaderInvocation.close();
+ ReaderMemory.close();
+ ReaderExecution.close();
+ raf.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+		System.err.println("Total number of generated records: " + recordCounter);
+ }//end combine
+
+	/*
+	 * After combining the three files into one, this can generate jobs directly
+	 * from the combined file
+	 */
+	public void generateDirectly() {
+		dest = Paths.get(path.getParent() + "/" + "functionsOneDay.csv"); //path of the combined file
+		String file = String.valueOf(dest);
+		if(file.endsWith(".csv")) {
+			// new CSVReader(file, 0, recordCounter, false, DCFJob.class, "row", true, 0, true);
+		}
+ }
+
+}
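Aside: the bracket-stripping used for header and record assembly above (Arrays.toString followed by removing "[", "]" and whitespace) is equivalent to a plain comma join. A minimal standalone sketch, with made-up row fragments standing in for real trace columns:

import java.util.Arrays;

public class JoinEquivalenceSketch {
	public static void main(String[] args) {
		//made-up fragments; real rows hold hashes and per-minute counts
		String[] row = { "ownerHash", "appHash", "functionHash", "http" };
		String viaToString = Arrays.toString(row)
				.replaceAll("\\s+", "").replace("[", "").replace("]", "");
		String viaJoin = String.join(",", row);
		//both yield "ownerHash,appHash,functionHash,http"
		System.out.println(viaToString.equals(viaJoin));
	}
}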
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/CombineFilesGabor.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/CombineFilesGabor.java
new file mode 100644
index 0000000..f176ed0
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/CombineFilesGabor.java
@@ -0,0 +1,75 @@
+/**
+ * Combine approach 1 by Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.combineazuredataset;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.TreeMap;
+import java.util.function.Consumer;
+import java.util.stream.Stream;
+
+public class CombineFilesGabor {
+ public static class StringPair {
+ public final String hashes;
+ public final String rest;
+
+ public StringPair(String h, String r) {
+ hashes = h;
+ rest = r;
+ }
+ }
+
+	public static TreeMap<String, String> execution = new TreeMap<>();
+	public static TreeMap<String, String> memory = new TreeMap<>();
+
+	public static void readAllLines(String file, Consumer<String> singleLineProcessor) throws IOException {
+		try (Stream<String> lineStream = Files.lines(new File(file).toPath())) {
+ lineStream.forEach(singleLineProcessor);
+ }
+ }
+
+ public static StringPair splitToPair(String line, int commasForSplit) {
+ int split = -1;
+ for (int commas = 0; commas < commasForSplit; commas++) {
+ split = line.indexOf(',', split + 1);
+ }
+ return new StringPair(line.substring(0, split), line.substring(split));
+ }
+
+	public static void splitPut(String line, TreeMap<String, String> container, int commasForSplit) {
+ StringPair pair = splitToPair(line, commasForSplit);
+ container.put(pair.hashes, pair.rest);
+ }
+
+ public static void mergeLine(String line, BufferedWriter bw) {
+ StringPair execPair = splitToPair(line, 3);
+ StringPair memPair = splitToPair(line, 2);
+ String execRest = execution.get(execPair.hashes);
+ String memRest = memory.get(memPair.hashes);
+ if (execRest != null && memRest != null) {
+ try {
+ bw.write(line);
+ bw.write(execRest);
+ bw.write(memRest);
+ bw.write("\n");
+ } catch (IOException ioex) {
+ throw new RuntimeException(ioex);
+ }
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ long currTime = System.currentTimeMillis();
+ readAllLines("../execution.csv", s -> splitPut(s, execution, 3));
+ readAllLines("../memory.csv", s -> splitPut(s, memory, 2));
+ try (BufferedWriter bw = new BufferedWriter(new FileWriter("../merged.csv"))) {
+ readAllLines("../invocation.csv", s -> mergeLine(s, bw));
+ }
+ System.out.println((System.currentTimeMillis() - currTime) + "ms");
+ }
+
+}
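The splitToPair helper above cuts a line after the Nth comma and keeps the leading comma on the rest, so merged fragments can be concatenated without re-inserting separators. A small sketch of that behaviour with a made-up line, assuming the sketch sits in the same package:

public class SplitToPairSketch {
	public static void main(String[] args) {
		//made-up invocation line: three hash columns, then counts
		String line = "own1,app1,fun1,3,5";
		CombineFilesGabor.StringPair p = CombineFilesGabor.splitToPair(line, 3);
		System.out.println(p.hashes); //prints: own1,app1,fun1
		System.out.println(p.rest);   //prints: ,3,5 (comma retained for direct appending)
	}
}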
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/MainCombCSVFiles.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/MainCombCSVFiles.java
new file mode 100644
index 0000000..d1560d6
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/combineazuredataset/MainCombCSVFiles.java
@@ -0,0 +1,30 @@
+/**
+ * Combine approach 2 by Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.combineazuredataset;
+
+public class MainCombCSVFiles {
+	CombineFiles cf;
+
+ public MainCombCSVFiles(String invocationFile, String exeTimeFile, String memoryFile) {
+ if (invocationFile.endsWith(".csv") && exeTimeFile.endsWith(".csv") && memoryFile.endsWith(".csv")) {
+ cf = new CombineFiles(invocationFile, exeTimeFile, memoryFile);
+ long before = System.currentTimeMillis();
+ cf.combine();
+			cf.generateDirectly();
+			System.err.println("Files were combined successfully");
+			long after = System.currentTimeMillis();
+			System.err.println("Combining took " + (after - before) / 1000 + " s");
+		}else {
+			System.out.println("The files are not in CSV format");
+ }
+ }
+ public static void main(String[] args) {
+ //path
+		String fileInvocation = "E:\\Bitbucket\\experiments\\invocation.csv";
+		String fileExecution = "E:\\Bitbucket\\experiments\\execution.csv";
+		String fileMemory = "E:\\Bitbucket\\experiments\\memory.csv";
+
+		new MainCombCSVFiles(fileInvocation, fileExecution, fileMemory);
+ }
+}
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/Job.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/Job.java
index 68741af..b55d97f 100644
--- a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/Job.java
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/Job.java
@@ -120,7 +120,18 @@ public abstract class Job {
* Shows if this job has already been executed by the simulator.
*/
private boolean ran = false;
-
+	/**
+	 * True if this job caused a cold start
+	 */
+	public boolean is_cold = false;
+	/**
+	 * The time instance at which the job starts on its instance (client-side view)
+	 */
+	private long client_start_time = 0;
+	/**
+	 * The time instance at which the job ends on its instance (client-side view)
+	 */
+	private long client_end_time = 0;
/**
* The generic constructor to be used by most of the trace generators and in
* most of the use cases.
@@ -152,8 +163,8 @@ public Job(String id, long submit, long queue, long exec, int nprocs, double ppC
submittimeSecs = submit;
queuetimeSecs = queue;
exectimeSecs = exec;
- stoptimeSecs = queuetimeSecs + exectimeSecs + submittimeSecs;
- starttimeSecs = submittimeSecs + queuetimeSecs;
+ stoptimeSecs = client_end_time = queuetimeSecs + exectimeSecs + submittimeSecs;
+ starttimeSecs = client_start_time = submittimeSecs + queuetimeSecs;
midExecInstanceSecs = starttimeSecs + exectimeSecs / 2;
this.nprocs = nprocs;
// Assumes full CPU utilization for every processor for the complete
@@ -266,7 +277,22 @@ public long getStartTimeInstance() {
public long getMidExecInstanceSecs() {
return midExecInstanceSecs;
}
-
+	/**
+	 * Determines the time instance at which this job started on the client side
+	 *
+	 * @return the client-observed start time of the job
+	 */
+	public long get_client_start_time() {
+		return this.client_start_time;
+	}
+	/**
+	 * Determines the time instance at which this job ended on the client side
+	 *
+	 * @return the client-observed end time of the job
+	 */
+	public long get_client_end_time() {
+		return this.client_end_time;
+	}
/**
* Simulator specific implementation that can set the real start time
* (allows the query of the current simulated time instance independent from
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/JobListAnalyser.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/JobListAnalyser.java
index e81dd18..ef52a1d 100644
--- a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/JobListAnalyser.java
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/job/JobListAnalyser.java
@@ -97,5 +97,18 @@ public int compare(Job o1, Job o2) {
return Long.signum(o1.getStartTimeInstance() - o2.getStartTimeInstance());
}
};
+	/**
+	 * A job comparator that allows the ordering of jobs based on their
+	 * client start time instance.
+	 */
+	public static final Comparator<Job> clientStartTimeComparator = new Comparator<Job>() {
+		@Override
+		public int compare(Job o1, Job o2) {
+			return Long.signum(o1.get_client_start_time() - o2.get_client_start_time());
+		}
+	};
+
}
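Like the existing start and stop time comparators in this class, the new comparator plugs straight into Collections.sort; a minimal usage sketch:

import java.util.Collections;
import java.util.List;

import hu.mta.sztaki.lpds.cloud.simulator.helpers.job.Job;
import hu.mta.sztaki.lpds.cloud.simulator.helpers.job.JobListAnalyser;

public class ClientStartOrdering {
	//orders the jobs by the client-observed start time, earliest first
	public static void order(List<Job> jobs) {
		Collections.sort(jobs, JobListAnalyser.clientStartTimeComparator);
	}
}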
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/FaaSJob.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/FaaSJob.java
new file mode 100644
index 0000000..c7a7ea1
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/FaaSJob.java
@@ -0,0 +1,47 @@
+/*
+ * ========================================================================
+ * Helper classes to support simulations of large scale distributed systems
+ * ========================================================================
+ *
+ * This file is part of DistSysJavaHelpers.
+ *
+ * DistSysJavaHelpers is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * DistSysJavaHelpers is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+ *
+ * (C) Copyright 2022, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
+ * (C) Copyright 2022, Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator;
+
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.job.Job;
+
+public class FaaSJob extends Job{
+
+ public FaaSJob(String id, long submit, long queue, long exec, int nprocs, double ppCpu, long ppMem, String user,
+ String group, String executable, Job preceding, long delayAfter) {
+ super(id, submit, queue, exec, nprocs, ppCpu, ppMem, user, group, executable, preceding, delayAfter);
+ }
+
+	@Override
+	public void started() {
+		//FaaS jobs need no special bookkeeping when they start
+	}
+
+	@Override
+	public void completed() {
+		//FaaS jobs need no special bookkeeping when they complete
+	}
+
+}
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/GenerateExecutionTime.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/GenerateExecutionTime.java
new file mode 100644
index 0000000..dc1455e
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/GenerateExecutionTime.java
@@ -0,0 +1,230 @@
+/*
+ * ========================================================================
+ * Helper classes to support simulations of large scale distributed systems
+ * ========================================================================
+ *
+ * This file is part of DistSysJavaHelpers.
+ *
+ * DistSysJavaHelpers is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * DistSysJavaHelpers is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+ *
+ * (C) Copyright 2022, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
+ * (C) Copyright 2022, Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.PriorityQueue;
+import java.util.Random;
+
+import org.apache.commons.math3.genetics.AbstractListChromosome;
+import org.apache.commons.math3.genetics.Chromosome;
+import org.apache.commons.math3.genetics.ElitisticListPopulation;
+import org.apache.commons.math3.genetics.FixedGenerationCount;
+import org.apache.commons.math3.genetics.GeneticAlgorithm;
+import org.apache.commons.math3.genetics.InvalidRepresentationException;
+import org.apache.commons.math3.genetics.OnePointCrossover;
+import org.apache.commons.math3.genetics.Population;
+import org.apache.commons.math3.genetics.StoppingCondition;
+import org.apache.commons.math3.genetics.TournamentSelection;
+import org.apache.commons.math3.stat.regression.SimpleRegression;
+
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.random.DistributionSpecifier;
+
+public class GenerateExecutionTime {
+
+	// Total number of invocations of the function during the day
+	private static int totalCount;
+	// Execution time recorded at each percentile
+	private static double p0, p1, p25, p50, p75, p99, p100;
+	private static double max;
+	// All execution times generated for the day, kept sorted
+	private static PriorityQueue<Double> allExecTime = new PriorityQueue<Double>();
+	//positions of the percentiles within a sorted representation
+	private static int pg0, pg1, pg25, pg50, pg75, pg99, pg100;
+	//distribution used when seeding the initial random values
+	private static DistributionSpecifier ds;
+ //Parameters for the GA
+ private static final int POPULATION_SIZE = 10;
+ private static final int NUM_GENERATIONS = 10;
+ private static final double ELITISM_RATE = 0.3;
+ private static final double CROSSOVER_RATE = 0.7;
+ private static final double MUTATION_RATE = 0.1;
+ private static final int TOURNAMENT_ARITY = 2;
+
+	//Probabilities for the percentile buckets that the initial values should follow
+ static double pro0 = 0.0024;
+ static double pro1 = 0.0113;
+ static double pro25 = 0.4123;
+ static double pro50 = 0.1185;
+ static double pro75 = 0.4349;
+ static double pro99 = 0.019;
+ static double pro100 = 0.0013;
+
+	//list of values that represent one chromosome (individual)
+	private static List<Double> indList = new ArrayList<Double>();
+
+	public static void GeneExecTime(String totCount, String per0, String per1, String per25, String per50, String per75, String per99, String per100, String maxValue) {
+		allExecTime.clear();
+		totalCount = Integer.parseInt(totCount);
+ p0 = Double.parseDouble(per0);
+ p1 = Double.parseDouble(per1);
+ p25 = Double.parseDouble(per25);
+ p50 = Double.parseDouble(per50);
+ p75 = Double.parseDouble(per75);
+ p99 = Double.parseDouble(per99);
+ p100 = Double.parseDouble(per100);
+ max = Double.parseDouble(maxValue);
+
+ /*
+ * initialize a new genetic algorithm
+ */
+ GeneticAlgorithm ga = new GeneticAlgorithm(
+				new OnePointCrossover<Double>(),
+ CROSSOVER_RATE,
+ new SimpleMutation(),
+ MUTATION_RATE,
+ new TournamentSelection(TOURNAMENT_ARITY));
+ //Create populations
+ Population initial = randomPopulation();
+
+ StoppingCondition stopCond = new FixedGenerationCount(NUM_GENERATIONS);
+
+ // Run the algorithm
+ Population finalPopulation = ga.evolve(initial, stopCond);
+
+ // Best chromosome from the final population
+ Chromosome bestFinal = finalPopulation.getFittestChromosome();
+
+		//Recover the gene values of the best individual from its toString form
+		String bestIndList = bestFinal.toString();
+
+		//strip the brackets and parentheses that Chromosome.toString adds
+		bestIndList = bestIndList.replace("[", ",");
+		bestIndList = bestIndList.replace("]", ",");
+		bestIndList = bestIndList.replace("(", ",");
+		bestIndList = bestIndList.replace(")", ",");
+
+ //split String based on comma
+		String[] items = bestIndList.split("\\s*,\\s*");
+		List<String> container = Arrays.asList(items);
+
+		//Add the values of the best individual to the queue of generated execution
+		//times (the first entries of the split are the fitness prefix, hence the offset)
+		for(int j = 3; j < container.size(); j++) {
+			allExecTime.add(Double.parseDouble(container.get(j)));
+		}
+	}
+	//Hand over the generated execution times; once consumed, the fields are reset
+	public static PriorityQueue<Double> getAllExecTime(){
+		if(allExecTime.isEmpty())
+			reset();
+		return allExecTime;
+	}
+ // Reset all fields for next function
+ private static void reset() {
+ p0 = p1 = p25 = p50 = p75 = p99 = p100 = max = 0;
+ pg0 = pg1 = pg25 = pg50 = pg75 = pg99 = pg100 = 0;
+ totalCount = 0;
+
+ }
+ /*
+ * Create initial population
+ */
+	private static ElitisticListPopulation randomPopulation() {
+		List<Chromosome> popList = new ArrayList<Chromosome>();
+
+		for(int j = 0; j < POPULATION_SIZE; j++) {
+			//simple initial guess: totalCount random values between the extreme
+			//percentiles; the GA then reshapes them towards the recorded percentiles
+			indList = new ArrayList<Double>(totalCount);
+			Random rnd = new Random();
+			for(int i = 0; i < totalCount; i++) {
+				indList.add(p0 + rnd.nextDouble() * (p100 - p0));
+			}
+			popList.add(new Individual(indList));
+		}
+		return new ElitisticListPopulation(popList, POPULATION_SIZE, ELITISM_RATE);
+	}
+
+	public static class Individual extends AbstractListChromosome<Double> {
+
+		public Individual(List<Double> representation) throws InvalidRepresentationException {
+			super(representation);
+		}
+		@Override
+		public double fitness() {
+			//work on a sorted copy of this individual's gene values
+			List<Double> innerRep = new ArrayList<Double>(getRepresentation());
+			Collections.sort(innerRep);
+
+ //Calculate position of percentiles
+ pg0 = 0;
+ pg1 = (int) (0.01 * innerRep.size());
+ pg25 = (int) (0.25 * innerRep.size());
+ pg50 = (int) (0.5 * innerRep.size());
+ pg75 = (int) (0.75 * innerRep.size());
+ pg99 = (int) (0.99 * innerRep.size());
+ pg100 = innerRep.size()-1;
+ /*
+ * Calculate R-Squared
+ */
+ SimpleRegression R = new SimpleRegression();
+
+ R.addData(p0, (double) innerRep.get(pg0));
+ R.addData(p1, (double) innerRep.get(pg1));
+ R.addData(p25, (double) innerRep.get(pg25));
+ R.addData(p50, (double) innerRep.get(pg50));
+ R.addData(p75, (double) innerRep.get(pg75));
+ R.addData(p99, (double) innerRep.get(pg99));
+ R.addData(p100, (double) innerRep.get(pg100));
+
+ return R.getRSquare();
+ }
+
+		@Override
+		protected void checkValidity(List<Double> chromosomeRepresentation) throws InvalidRepresentationException {
+			//any list of doubles is a valid execution time candidate
+		}
+
+		@Override
+		public AbstractListChromosome<Double> newFixedLengthChromosome(List<Double> chromosomeRepresentation) {
+			return new Individual(chromosomeRepresentation);
+		}
+ }//end class
+
+}
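The fitness function above scores a candidate by regressing its values at the percentile positions against the percentiles recorded in the trace, and returns R-squared (1.0 meaning a perfect shape match). The idea in isolation, with made-up numbers:

import java.util.Arrays;

import org.apache.commons.math3.stat.regression.SimpleRegression;

public class PercentileFitnessSketch {
	public static void main(String[] args) {
		//made-up recorded percentiles (p25, p50, p75) of execution time
		double[] recorded = { 40, 100, 210 };
		//a candidate list of generated execution times, sorted first
		double[] candidate = { 30, 55, 95, 150, 220, 400 };
		Arrays.sort(candidate);
		SimpleRegression r = new SimpleRegression();
		//pair each recorded percentile with the candidate value at the same rank
		r.addData(recorded[0], candidate[(int) (0.25 * candidate.length)]);
		r.addData(recorded[1], candidate[(int) (0.50 * candidate.length)]);
		r.addData(recorded[2], candidate[(int) (0.75 * candidate.length)]);
		System.out.println(r.getRSquare()); //the closer to 1, the fitter
	}
}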
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/GenerateMemory.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/GenerateMemory.java
new file mode 100644
index 0000000..ba4dbc4
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/GenerateMemory.java
@@ -0,0 +1,224 @@
+/*
+ * ========================================================================
+ * Helper classes to support simulations of large scale distributed systems
+ * ========================================================================
+ *
+ * This file is part of DistSysJavaHelpers.
+ *
+ * DistSysJavaHelpers is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * DistSysJavaHelpers is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+ *
+ * (C) Copyright 2022, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
+ * (C) Copyright 2022, Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.PriorityQueue;
+import java.util.Random;
+
+import org.apache.commons.math3.genetics.AbstractListChromosome;
+import org.apache.commons.math3.genetics.Chromosome;
+import org.apache.commons.math3.genetics.ElitisticListPopulation;
+import org.apache.commons.math3.genetics.FixedGenerationCount;
+import org.apache.commons.math3.genetics.GeneticAlgorithm;
+import org.apache.commons.math3.genetics.InvalidRepresentationException;
+import org.apache.commons.math3.genetics.OnePointCrossover;
+import org.apache.commons.math3.genetics.Population;
+import org.apache.commons.math3.genetics.StoppingCondition;
+import org.apache.commons.math3.genetics.TournamentSelection;
+import org.apache.commons.math3.stat.regression.SimpleRegression;
+
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.random.DistributionSpecifier;
+
+public class GenerateMemory {
+	// Total number of invocations of the function during the day
+	private static int totalCount;
+	// Memory value recorded at each percentile
+	private static double p1, p5, p25, p50, p75, p95, p99, p100;
+	// All memory values generated for the day, kept sorted
+	private static PriorityQueue<Double> allMemoryValues = new PriorityQueue<Double>();
+	// Distribution used when seeding the initial random values
+	private static DistributionSpecifier ds;
+
+ //Parameters for the GA
+ private static final int POPULATION_SIZE = 10;
+ private static final int NUM_GENERATIONS = 10;
+ private static final double ELITISM_RATE = 0.3;
+ private static final double CROSSOVER_RATE = 0.7;
+ private static final double MUTATION_RATE = 0.1;
+ private static final int TOURNAMENT_ARITY = 2;
+
+	//positions of the percentiles within a sorted representation
+	private static int pg1, pg5, pg25, pg50, pg75, pg95, pg99, pg100;
+
+	//list of values that represent one chromosome (individual)
+	private static List<Double> indList = new ArrayList<Double>();
+
+	public static void GeneMemory(String totCount, String per1, String per5, String per25, String per50, String per75, String per95, String per99, String per100) {
+		allMemoryValues.clear();
+		//the memory trace stores integral values as e.g. "123.0", so the ".0" suffix is stripped before parsing
+		totalCount = Integer.parseInt(totCount.replace(".0", ""));
+ p1 = Double.parseDouble(per1.replace(".0", ""));
+ p5 = Double.parseDouble(per5.replace(".0", ""));
+ p25 = Double.parseDouble(per25.replace(".0", ""));
+ p50 = Double.parseDouble(per50.replace(".0", ""));
+ p75 = Double.parseDouble(per75.replace(".0", ""));
+ p95 = Double.parseDouble(per95.replace(".0", ""));
+ p99 = Double.parseDouble(per99.replace(".0", ""));
+ p100 = Double.parseDouble(per100.replace(".0", ""));
+
+ GeneticAlgorithm ga = new GeneticAlgorithm(
+				new OnePointCrossover<Double>(),
+ CROSSOVER_RATE,
+ new SimpleMutation(),
+ MUTATION_RATE,
+ new TournamentSelection(TOURNAMENT_ARITY));
+ //Create populations
+ Population initial = randomPopulation();
+
+ StoppingCondition stopCond = new FixedGenerationCount(NUM_GENERATIONS);
+
+ // Run the algorithm
+ Population finalPopulation = ga.evolve(initial, stopCond);
+
+ // Best chromosome from the final population
+ Chromosome bestFinal = finalPopulation.getFittestChromosome();
+
+		//Recover the gene values of the best individual from its toString form
+		String bestIndList = bestFinal.toString();
+
+		//strip the brackets and parentheses that Chromosome.toString adds
+		bestIndList = bestIndList.replace("[", ",");
+		bestIndList = bestIndList.replace("]", ",");
+		bestIndList = bestIndList.replace("(", ",");
+		bestIndList = bestIndList.replace(")", ",");
+
+ //split String based on comma
+		String[] items = bestIndList.split("\\s*,\\s*");
+		List<String> container = Arrays.asList(items);
+
+		//Add the values of the best individual to the queue of generated memory
+		//values (the first entries of the split are the fitness prefix, hence the offset)
+		for(int j = 3; j < container.size(); j++) {
+			allMemoryValues.add(Double.parseDouble(container.get(j)));
+		}
+	}
+	//Hand over the generated memory values; null signals that nothing was generated
+	public static PriorityQueue<Double> getAllMemory(){
+		if (allMemoryValues.isEmpty()) {
+			return null;
+		}
+		reset();
+		return allMemoryValues;
+	}
+	// Reset all fields for the next function
+ private static void reset() {
+ p1 = p5 = p25 = p50 = p75 = p95 = p99 = p100 = 0;
+ totalCount = 0;
+
+ }
+	private static ElitisticListPopulation randomPopulation() {
+		List<Chromosome> popList = new ArrayList<Chromosome>();
+
+		for(int j = 0; j < POPULATION_SIZE; j++) {
+			//simple initial guess: totalCount random values between the extreme
+			//percentiles; the GA then reshapes them towards the recorded percentiles
+			indList = new ArrayList<Double>(totalCount);
+			Random rnd = new Random();
+			for(int i = 0; i < totalCount; i++) {
+				indList.add(p1 + rnd.nextDouble() * (p100 - p1));
+			}
+			popList.add(new Individual(indList));
+		}
+		return new ElitisticListPopulation(popList, POPULATION_SIZE, ELITISM_RATE);
+	}
+
+	public static class Individual extends AbstractListChromosome<Double> {
+
+		public Individual(List<Double> representation) throws InvalidRepresentationException {
+			super(representation);
+		}
+
+		@Override
+		public double fitness() {
+			//work on a sorted copy of this individual's gene values
+			List<Double> innerRep = new ArrayList<Double>(getRepresentation());
+			Collections.sort(innerRep);
+
+ //Calculate position of percentiles
+ pg1 = (int) (0.01 * innerRep.size());
+ pg5 = (int) (0.05 * innerRep.size());
+ pg25 = (int) (0.25 * innerRep.size());
+ pg50 = (int) (0.5 * innerRep.size());
+ pg75 = (int) (0.75 * innerRep.size());
+ pg95 = (int) (0.95 * innerRep.size());
+ pg99 = (int) (0.99 * innerRep.size());
+ pg100 = innerRep.size()-1;
+ /*
+ * Calculate R-Squared
+ */
+ SimpleRegression R = new SimpleRegression();
+
+ R.addData(p1, (double) innerRep.get(pg1));
+ R.addData(p5, (double) innerRep.get(pg5));
+ R.addData(p25, (double) innerRep.get(pg25));
+ R.addData(p50, (double) innerRep.get(pg50));
+ R.addData(p75, (double) innerRep.get(pg75));
+ R.addData(p95, (double) innerRep.get(pg95));
+ R.addData(p99, (double) innerRep.get(pg99));
+ R.addData(p100, (double) innerRep.get(pg100));
+
+ return R.getRSquare();
+ }
+
+		@Override
+		protected void checkValidity(List<Double> chromosomeRepresentation) throws InvalidRepresentationException {
+			//any list of doubles is a valid memory candidate
+		}
+
+		@Override
+		public AbstractListChromosome<Double> newFixedLengthChromosome(List<Double> chromosomeRepresentation) {
+			return new Individual(chromosomeRepresentation);
+		}
+ }//end class
+}
\ No newline at end of file
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/MainServerlessTraceGenerator.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/MainServerlessTraceGenerator.java
new file mode 100644
index 0000000..12ad1df
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/MainServerlessTraceGenerator.java
@@ -0,0 +1,92 @@
+/*
+ * ========================================================================
+ * Helper classes to support simulations of large scale distributed systems
+ * ========================================================================
+ *
+ * This file is part of DistSysJavaHelpers.
+ *
+ * DistSysJavaHelpers is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * DistSysJavaHelpers is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+ *
+ * (C) Copyright 2022, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
+ * (C) Copyright 2022, Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator;
+
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.file.CSVReader;
+
+public class MainServerlessTraceGenerator {
+ /**
+ * Range of lines to be read in a trace
+ */
+ final int from = 1;
+ final int to = 4;
+ /**
+ * Extract information about users such as number of invoked tasks
+ */
+ final boolean extractAnalyseTrace = false;
+	/**
+	 * Enable scaling the workload with user behaviour
+	 * Note: the line range is ignored in this case; the requiredJobs count is used instead
+	 */
+	final boolean scaleUserBehaviour = false;
+	/**
+	 * Approximate number of jobs to generate based on user behaviour
+	 */
+	final int requiredJobs = 2000;
+ /**
+ * Range of minutes to consider in a day
+ */
+ final int minMinute = 0;
+ final int maxMinute = 1440;
+	/**
+	 * Trigger type to keep from the trace; null keeps every trigger
+	 */
+	final String triggerType = null;
+	/**
+	 * Minimum execution time (in seconds) to consider (default 0)
+	 */
+	final int minExeTime = 0;
+ /**
+ * Convert to Standard Workload Format
+ */
+ final boolean swfFormat = false;
+ /**
+ * Convert to Grid Workload Format
+ */
+ final boolean gwfFormat = false;
+ /**
+ * Convert to AWS Lambda Format
+ */
+ final boolean awsLambdaFormat = false;
+
+ public MainServerlessTraceGenerator(String traceFileLoc) throws SecurityException, NoSuchMethodException {
+ if (traceFileLoc.endsWith(".csv")) {
+ CSVReader csv = new CSVReader(traceFileLoc, from, to, true, FaaSJob.class);
+ csv.init(extractAnalyseTrace, scaleUserBehaviour, requiredJobs, triggerType, swfFormat, gwfFormat, awsLambdaFormat,
+ minMinute, maxMinute, minExeTime);
+		}else {
+			System.out.println("Selected file is not a CSV");
+		}
+ }
+ public static void main(String[] args) throws SecurityException, NoSuchMethodException {
+		if (args.length < 1) {
+			System.err.println("No file given; please specify the location of the CSV Azure dataset file");
+		}else {
+ long before = System.currentTimeMillis();
+ new MainServerlessTraceGenerator(args[0]);
+			long totalExecution = System.currentTimeMillis() - before;
+			System.out.println("Generation took: " + totalExecution + "ms");
+ }
+ }
+}
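For reference, the generator takes the trace location as its only command-line argument; a hypothetical driver in the same package (the path is an example):

public class GeneratorDriver {
	public static void main(String[] args) throws Exception {
		//example path; point this at the combined Azure dataset CSV
		MainServerlessTraceGenerator.main(new String[] { "/path/to/functionsOneDay.csv" });
	}
}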
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/SimpleMutation.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/SimpleMutation.java
new file mode 100644
index 0000000..ecba933
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/serverless/workload/generator/SimpleMutation.java
@@ -0,0 +1,17 @@
+package hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator;
+
+import org.apache.commons.math3.exception.MathIllegalArgumentException;
+import org.apache.commons.math3.genetics.Chromosome;
+import org.apache.commons.math3.genetics.MutationPolicy;
+
+public class SimpleMutation implements MutationPolicy {
+
+	/**
+	 * Identity mutation: the chromosome is returned unchanged, so the search is
+	 * driven by crossover, selection and elitism alone.
+	 */
+	@Override
+	public Chromosome mutate(Chromosome original) throws MathIllegalArgumentException {
+		return original;
+	}
+
+}
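SimpleMutation is deliberately a no-op, so the genetic search is shaped by crossover and elitism alone. If real mutation were ever wanted, a random-reset variant along the following lines would satisfy the same MutationPolicy contract. This is only a sketch: it assumes Individual gains a public getGenes() accessor, which does not exist above.

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.apache.commons.math3.exception.MathIllegalArgumentException;
import org.apache.commons.math3.genetics.Chromosome;
import org.apache.commons.math3.genetics.MutationPolicy;

import hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator.GenerateExecutionTime.Individual;

public class PerturbingMutationSketch implements MutationPolicy {
	private final Random rnd = new Random();

	@Override
	public Chromosome mutate(Chromosome original) throws MathIllegalArgumentException {
		if (!(original instanceof Individual)) {
			return original; //only the Double-list chromosomes can be perturbed
		}
		//copy the genes via the assumed public accessor (hypothetical),
		//then nudge one randomly chosen gene by up to +/-10%
		List<Double> genes = new ArrayList<Double>(((Individual) original).getGenes());
		int idx = rnd.nextInt(genes.size());
		genes.set(idx, genes.get(idx) * (0.9 + 0.2 * rnd.nextDouble()));
		return new Individual(genes);
	}
}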
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/FileBasedTraceProducerFactory.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/FileBasedTraceProducerFactory.java
index 8773dc9..33e3888 100644
--- a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/FileBasedTraceProducerFactory.java
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/FileBasedTraceProducerFactory.java
@@ -28,6 +28,7 @@
import java.io.IOException;
import hu.mta.sztaki.lpds.cloud.simulator.helpers.job.Job;
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.file.CSVReader;
import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.file.GWFReader;
import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.file.One2HistoryReader;
import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.file.SWFReader;
@@ -78,6 +79,8 @@ public static GenericTraceProducer getProducerFromFile(String fileName, int from
GenericTraceProducer producer = null;
if (fileName.endsWith(".gwf")) {
producer = new GWFReader(fileName, from, to, furtherjobs, jobType);
+		} else if (fileName.endsWith(".csv")) {
+			producer = new CSVReader(fileName, from, to, furtherjobs, jobType);
} else if (fileName.endsWith(".swf")) {
producer = new SWFReader(fileName, from, to, furtherjobs, jobType);
} else if (fileName.endsWith(".srtg")) {
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/file/CSVReader.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/file/CSVReader.java
new file mode 100644
index 0000000..1fa34d1
--- /dev/null
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/file/CSVReader.java
@@ -0,0 +1,110 @@
+/*
+ * ========================================================================
+ * Helper classes to support simulations of large scale distributed systems
+ * ========================================================================
+ *
+ * This file is part of DistSysJavaHelpers.
+ *
+ * DistSysJavaHelpers is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * DistSysJavaHelpers is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+ *
+ * (C) Copyright 2022, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
+ * (C) Copyright 2022, Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
+ */
+package hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.file;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.concurrent.ThreadLocalRandom;
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.job.Job;
+
+public class CSVReader extends TraceFileReaderFoundation {
+
+	static int idCounter = -1;
+	//start of the traced day, in seconds since the Unix epoch
+	static long epoch = 1557656918;
+
+	public CSVReader(String fileName, int from, int to, boolean allowReadingFurther,
+			Class<? extends Job> jobType) throws SecurityException, NoSuchMethodException {
+		super("Serverless workload", fileName, from, to, allowReadingFurther, jobType);
+		super.generateFromCSV = true;
+	}
+ public void init(boolean extract, boolean scale, int requiredJobs, String trigger, boolean swf, boolean gwf, boolean aws,
+ int minMinute, int maxMinute, int minExecution) {
+
+ scaleWithUserBehaviour(scale, requiredJobs);
+ if(scale) {
+ exportData(extract);
+ }
+ generate(trigger, minMinute, maxMinute, minExecution);
+ if(swf) {
+ generateSWFFormat();
+ }
+ if(gwf) {
+ generateGWFFormat();
+ }
+ if(aws) {
+ generateAWSTrace();
+ }
+ }
+
+	@Override
+	protected boolean isTraceLine(String line) {
+		//every non-header line of the combined CSV is a trace line
+		return true;
+	}
+
+	@Override
+	protected void metaDataCollector(String line) {
+		//the combined CSV carries no metadata lines
+	}
+
+ @Override
+ protected Job createJobFromLine(String jobstring)
+ throws IllegalArgumentException, InstantiationException, IllegalAccessException, InvocationTargetException {
+ if(super.azureAws) {
+ String[] elements = jobstring.trim().split("\\s+");
+ String userID = elements[0];
+ String executableID = elements[1];
+ String triggerGroup = elements[2];
+ String uniQId = String.valueOf(++idCounter);
+			long subTime = Long.parseLong(elements[3]);
+			//the trace records only the minute of submission; pick a uniformly random second within that minute
+			long max = subTime * 60;
+			long min = max - 60;
+			long subTimeSec = ThreadLocalRandom.current().nextLong(min, max + 1);
+			long submittime = epoch + subTimeSec;
+ long runtime = Long.parseLong(elements[4]);
+ long memory = Long.parseLong(elements[5]);
+ int procs = (int) (runtime / memory);
+ long waitTime = 1;
+ double ppCpu = 0;
+
+ return jobCreator.newInstance(uniQId, submittime, waitTime, runtime, Math.max(1, procs), ppCpu, memory,
+ userID,triggerGroup, executableID, null, 0);
+ }else {
+ String[] elements = jobstring.trim().split(",");
+ String id = elements[0];
+ String executableID = elements[4];
+ long submittime = (long) (Double.parseDouble(elements[13]) / 1000);
+ long runtime = (long) Double.parseDouble(elements[22]);
+ long memory = (long) Double.parseDouble(elements[18]);
+ char cpuValue = elements[2].charAt(1);
+ final int procs = Integer.parseInt(String.valueOf(cpuValue));
+ long waitTime = 0;
+ double ppCpu = 0;
+			//Job derives the stop time from submit, queue and runtime
+ return jobCreator.newInstance(id, submittime, waitTime, runtime, Math.max(1, procs), ppCpu, memory,
+ "U","G", executableID, null, 0);
+ }
+ }
+
+}
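The Azure trace only records the minute of each invocation, so createJobFromLine spreads submissions uniformly over that minute before anchoring them to the trace epoch. The same computation in isolation, with a made-up minute index:

import java.util.concurrent.ThreadLocalRandom;

public class SubmitJitterSketch {
	public static void main(String[] args) {
		long epoch = 1557656918L; //trace start in seconds, as in CSVReader
		long minuteOfDay = 42;    //made-up minute column value
		long max = minuteOfDay * 60;  //last second of that minute
		long min = max - 60;          //first second of that minute
		long subTimeSec = ThreadLocalRandom.current().nextLong(min, max + 1);
		System.out.println("submit time: " + (epoch + subTimeSec));
	}
}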
diff --git a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/file/TraceFileReaderFoundation.java b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/file/TraceFileReaderFoundation.java
index 32bd36c..f1ba89d 100644
--- a/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/file/TraceFileReaderFoundation.java
+++ b/src/main/java/hu/mta/sztaki/lpds/cloud/simulator/helpers/trace/file/TraceFileReaderFoundation.java
@@ -17,7 +17,9 @@
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see .
- *
+ *
+ * (C) Copyright 2022, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
+ * (C) Copyright 2022, Dilshad H. Sallo (sallo@iit.uni-miskolc.hu)
* (C) Copyright 2016, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
* (C) Copyright 2012-2015, Gabor Kecskemeti (kecskemeti.gabor@sztaki.mta.hu)
*/
@@ -26,14 +28,28 @@
import java.io.BufferedReader;
import java.io.File;
+import java.io.FileNotFoundException;
import java.io.FileReader;
+import java.io.IOException;
+import java.io.RandomAccessFile;
import java.lang.reflect.InvocationTargetException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Calendar;
+import java.util.Collections;
import java.util.HashMap;
+import java.util.Iterator;
import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.TreeMap;
+import java.util.concurrent.ThreadLocalRandom;
import hu.mta.sztaki.lpds.cloud.simulator.helpers.job.Job;
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.job.JobListAnalyser;
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator.GenerateExecutionTime;
+import hu.mta.sztaki.lpds.cloud.simulator.helpers.serverless.workload.generator.GenerateMemory;
import hu.mta.sztaki.lpds.cloud.simulator.helpers.trace.TraceProducerFoundation;
/**
@@ -87,7 +103,75 @@ public abstract class TraceFileReaderFoundation extends TraceProducerFoundation
* reading has been completed.
*/
private int lineIdx = -1;
-
+	/**
+	 * Stores, for each user, the number of services and invocations
+	 */
+	private Map<String, HashMap<Integer, Integer>> userSerInvo;
+	/**
+	 * Stores, for each required user, the number of services and invocations
+	 */
+	private Map<String, HashMap<Integer, Integer>> userSerInvoRequired;
+ /**
+ * Information of header in CSV Azure file
+ */
+ private String[] headerComponents;
+ /**
+ * Total number of jobs (invocations)
+ */
+ private float totInvocDay = 0;
+ /**
+	/**
+	 * Percentage of the total invocations contributed by each user
+	 */
+	private HashMap<String, Float> invoPercentage;
+	/**
+	 * Approximate number of tasks to generate
+	 */
+	private int totalUserGenerator = 0;
+ /**
+ * Source and destination path
+ */
+ Path path, dest = null;
+ /**
+ * For approach of scaling workload with user behavior
+ */
+ private boolean scaleWithUserBehaviour = false;
+	/**
+	 * Maps original user identifiers to generated numeric ids
+	 */
+	private TreeMap<String, Integer> users = new TreeMap<String, Integer>();
+	/**
+	 * Maps original executable identifiers to generated numeric ids
+	 */
+	private TreeMap<String, Integer> executable = new TreeMap<String, Integer>();
+ /**
+ * Counter for user id
+ */
+ private int userid = 0;
+ /**
+ * Counter for executable id
+ */
+ private int exeid = 0;
+	/**
+	 * Range of minutes to consider in a day
+	 */
+	private int minMinute = 0, maxMinute = 1440;
+ /**
+ * Generate from CSV file
+ */
+ protected boolean generateFromCSV = false;
+ /**
+ * Trigger type
+ */
+ private String trigger = null;
+ /**
+ * Min execution time (s)
+ */
+ private int minExecution = 0;
+	/**
+	 * True when reading an Azure trace; false when reading an AWS trace
+	 */
+	protected boolean azureAws = true;
/**
* Initializes the generic fields of all line based trace file readers.
*
@@ -126,6 +210,7 @@ protected TraceFileReaderFoundation(String traceKind, String fileName, int from,
this.from = from;
this.to = to;
furtherReadable = allowReadingFurther;
+ path = Paths.get(fileName);
}
/**
@@ -135,8 +220,194 @@ protected TraceFileReaderFoundation(String traceKind, String fileName, int from,
* @param count
* the number of jobs to be read from the current location.
*/
- private void readTrace(int count) {
- try {
+ private void readTrace(int count) {
+ if(generateFromCSV) {
+ if(scaleWithUserBehaviour) {
+ try {
+ System.err.println(traceKind + " trace file reader starts for: " + toBeRead + " at "
+ + Calendar.getInstance().getTime());
+ invoPercentage.clear();
+				currentlyOffered = new ArrayList<Job>();
+				fastCache = new HashMap<String, Job>();
+ String line = null;
+ final int aveCol = 1444;
+ final int mavCol = aveCol + 12;
+ final int countCol = 1445;
+ final int startColum = 4;
+ final int readTotalCol = 1440;
+ final int readTotalPerce = startColum + readTotalCol + 1;
+ final int readTotoMemory = readTotalPerce + 10;
+ final int userIndex = 0;
+ actualReader = new BufferedReader(new FileReader(toBeRead));
+ line = actualReader.readLine();
+ headerComponents = line.trim().split(",");
+
+ while((line = actualReader.readLine()) != null) {
+ String[] rowData = line.trim().split(",");
+ String traceUser = rowData[userIndex];
+					HashMap<Integer, Integer> tempNumSerInvo;
+
+ int max = Integer.parseInt(rowData[startColum + readTotalCol+3]);
+ int ave = Integer.parseInt(rowData[startColum + readTotalCol]);
+ int noInvocations = Integer.parseInt(rowData[startColum + readTotalCol+1]);
+
+ if(userSerInvoRequired.containsKey(traceUser)) {
+						tempNumSerInvo = userSerInvoRequired.get(traceUser);
+
+						Iterator<Map.Entry<Integer, Integer>> siValues = tempNumSerInvo.entrySet().iterator();
+						Map.Entry<Integer, Integer> sivalues = siValues.next();
+						int valueOfInvocation = sivalues.getValue();
+						int valueOfServices = sivalues.getKey();
+
+ valueOfServices = valueOfServices > valueOfInvocation ? 5 : valueOfServices;
+ if(valueOfInvocation == 0 || traceUser == null) {
+ continue;
+ }else {
+ int invoDueServices = (int) Math.round(valueOfInvocation / valueOfServices);
+ if(max != 0 && ave != 0 && invoDueServices > 2) {
+ GenerateExecutionTime.GeneExecTime(String.valueOf(invoDueServices), rowData[readTotalPerce + 3] , rowData[readTotalPerce+4],
+ rowData[readTotalPerce+5], rowData[readTotalPerce+6], rowData[readTotalPerce+7], rowData[readTotalPerce+8],
+ rowData[readTotalPerce +9], rowData[readTotalPerce+2]);
+ PriorityQueue totExeTime = GenerateExecutionTime.getAllExecTime();
+
+ GenerateMemory.GeneMemory(String.valueOf(invoDueServices), rowData[readTotoMemory+2], rowData[readTotoMemory+3],
+ rowData[readTotoMemory+4], rowData[readTotoMemory+5], rowData[readTotoMemory+6],
+ rowData[readTotoMemory+7], rowData[readTotoMemory+8], rowData[readTotoMemory+9]);
+ PriorityQueue totalMemory= GenerateMemory.getAllMemory();
+
+ long minute = ThreadLocalRandom.current().nextInt(minMinute, maxMinute+1);
+ String event = rowData[userIndex+3];
+ long exeTime = 0;
+ long memory = 0;
+
+ for(int i = 0; i < invoDueServices; i++) {
+ //[lost in source: build one job per scaled invocation from traceUser,
+ //event, minute and values polled from totExeTime/totalMemory]
+ }
+ }
+ }
+ }
+ }//end-while
+ currentlyOffered.addAll(fastCache.values());
+ } catch (Exception e) {
+ throw new RuntimeException("Error in line: " + lineIdx, e);
+ }
+ }else {
+ try {
+ currentlyOffered = new ArrayList<>();
+ fastCache = new HashMap<>();
+
+ String line = null;
+
+ final int startColum = 4;
+ final int readTotalCol = 1440;
+ final int readTotalPerce = startColum + readTotalCol + 1;
+ final int readTotoMemory = readTotalPerce + 10;
+ actualReader = new BufferedReader(new FileReader(toBeRead));
+ line = actualReader.readLine();
+ headerComponents = line.trim().split(",");
+ //determine which trace will be read
+ azureAws = headerComponents.length > 30;
+
+ if(azureAws) {
+ //[unrecoverable loop header lost from the patch here]
+ while(count > 0 && (line = actualReader.readLine()) != null) {
+ String[] rowData = line.trim().split(",");
+
+ int max = Integer.parseInt(rowData[startColum + readTotalCol+3]);
+ int ave = Integer.parseInt(rowData[startColum + readTotalCol]);
+ int noInvocations = Integer.parseInt(rowData[startColum + readTotalCol +1]);
+ if(max != 0 && ave != 0 && noInvocations >= 10) {
+
+ GenerateExecutionTime.GeneExecTime(rowData[readTotalPerce], rowData[readTotalPerce + 3] , rowData[readTotalPerce+4],
+ rowData[readTotalPerce+5], rowData[readTotalPerce+6], rowData[readTotalPerce+7], rowData[readTotalPerce+8],
+ rowData[readTotalPerce +9], rowData[readTotalPerce+2]);
+ PriorityQueue totExeTime = GenerateExecutionTime.getAllExecTime();
+
+ GenerateMemory.GeneMemory(rowData[readTotoMemory], rowData[readTotoMemory+2], rowData[readTotoMemory+3],
+ rowData[readTotoMemory+4], rowData[readTotoMemory+5], rowData[readTotoMemory+6],
+ rowData[readTotoMemory+7], rowData[readTotoMemory+8], rowData[readTotoMemory+9]);
+ PriorityQueue totalMemory = GenerateMemory.getAllMemory();
+ long exeTime = 0;
+ for(int j = (startColum + minMinute); j < (startColum + maxMinute); j++) {
+ int currentRow = Integer.parseInt(rowData[j]);
+ String event = rowData[3];
+ if(currentRow == 0)
+ continue;
+ for(int i = 0; i < currentRow; i++) {
+ //[lost in source: build one job for this invocation from event,
+ //exeTime and memory polled from totExeTime/totalMemory]
+ }
+ }//end-for-minutes
+ }//end-if
+ }//end-while-azure
+ currentlyOffered.addAll(fastCache.values());
+ fastCache.clear();
+ }//end-read-azure
+ if(!azureAws) {
+ while(count > 0 && (line = actualReader.readLine()) != null) {
+ Job toAdd = createJobFromLine(line);
+ if (toAdd == null)
+ continue;
+ fastCache.put(toAdd.getId(), toAdd);
+ count--;
+ }//end-while-aws
+ currentlyOffered.addAll(fastCache.values());
+ fastCache.clear();
+ }//end-read-aws
+ System.out.println("No of generated Jobs is : " + currentlyOffered.size());
+ } catch (Exception e) {
+ throw new RuntimeException("Error in line: " + lineIdx, e);
+ }
+
+ }//end-CSV
+ }else {
+ try {
if (!furtherReadable && lineIdx >= to) {
throw new Exception("Was set to stop after reaching the 'to' item");
}
@@ -179,11 +450,421 @@ private void readTrace(int count) {
currentlyOffered.addAll(fastCache.values());
System.err.println(traceKind + " trace file reader stops for: " + toBeRead + " at "
+ Calendar.getInstance().getTime());
+
} catch (Exception e) {
throw new RuntimeException("Error in line: " + lineIdx, e);
}
+ }
+ }
+ /**
+ * Enables scaling the generated workload with the observed user behaviour
+ *
+ * @param s enable the approach
+ * @param numberJobs approximate number of jobs to generate
+ */
+ protected void scaleWithUserBehaviour(boolean s, int numberJobs) {
+ if(s) {
+ totalUserGenerator = numberJobs;
+ scaleWithUserBehaviour = s;
+ calUserServiceInvocations();
+ calculatePercentage();
+ calculateRequiredInvocations();
+ }
}
+ /**
+ * Generates jobs from the CSV trace with the given trigger and limits
+ *
+ * @param trigger trigger type to filter for
+ * @param minMinute first minute of the day to read
+ * @param maxMinute last minute of the day to read
+ * @param minExecution minimum execution time (s)
+ */
+ protected void generate(String trigger, int minMinute, int maxMinute, int minExecution) {
+ this.minMinute = minMinute;
+ this.maxMinute = maxMinute;
+ this.trigger = trigger;
+ this.minExecution = minExecution;
+ generateFromCSV = true;
+ readTrace(to-from);
+ }
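+ /*
+ * Usage sketch (illustrative only): "reader" stands for any concrete
+ * subclass instance of this trace reader, and the argument values are
+ * assumptions; the calls are the protected helpers defined above.
+ *
+ * reader.scaleWithUserBehaviour(true, 10000); //target roughly 10k jobs
+ * reader.generate("http", 0, 1440, 1); //whole day, min 1s execution time
+ * reader.exportData(true); //dump the per-user CSV summaries
+ * reader.generateSWFFormat(); //write Trace.swf next to the input trace
+ */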
+ /**
+ * Calculate services and number of invocations for each user
+ */
+ private void calUserServiceInvocations() {
+ try {
+ userSerInvo = new HashMap<String, HashMap<Integer, Integer>>();
+ String line = null;
+
+ final int startColum = 4; //Column to start
+ final int readTotalCol = 1440; //Total columns
+ final int totalCount = startColum + readTotalCol + 1; //Columns contains total number of jobs and percentiles
+
+ actualReader = new BufferedReader(new FileReader(toBeRead));
+ line = actualReader.readLine();//Header
+ headerComponents = line.trim().split(",");
+
+ while((line = actualReader.readLine()) != null) {
+
+ String[] rowData = line.trim().split(",");
+ String user = rowData[0];//Get a user(owner hash-id)
+ int invocations = Integer.parseInt(rowData[totalCount]);//Get total invocations for a user
+ totInvocDay = totInvocDay + invocations;
+
+ if(!userSerInvo.containsKey(user)) {
+ //First row for this user: one service with its invocations
+ HashMap<Integer, Integer> dupUserInvocations = new HashMap<Integer, Integer>();
+ dupUserInvocations.put(1, invocations);
+ userSerInvo.put(user, dupUserInvocations);
+ }else {
+ //Further row for this user: one more service, invocations accumulated
+ Map.Entry<Integer, Integer> me = userSerInvo.get(user).entrySet().iterator().next();
+ int key = me.getKey() + 1; //No of services
+ int newValue = me.getValue() + invocations; //Accumulated invocations
+ HashMap<Integer, Integer> updated = new HashMap<Integer, Integer>();
+ updated.put(key, newValue);
+ userSerInvo.put(user, updated);//user with total invocations
+ }
+ }//end-while
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }//end-calUserServiceInvocations
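+ /*
+ * Worked example (illustrative numbers): a user hash that appears on
+ * three rows with 10, 25 and 15 daily invocations ends up with the
+ * single entry {3 -> 50}: three services, 50 invocations in total.
+ */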
+ /**
+ * Calculate percentage of invocations for users
+ */
+ private void calculatePercentage() {
+ invoPercentage = new HashMap<String, Float>();
+ for (Map.Entry<String, HashMap<Integer, Integer>> userInfo : userSerInvo.entrySet()) {
+ //Each inner map holds a single services->invocations entry
+ int value = userInfo.getValue().values().iterator().next();//invocations of this user
+ float perValue = (value / totInvocDay) * 100;
+ invoPercentage.put(userInfo.getKey(), perValue);
+ }
+ }
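+ /*
+ * Worked example (illustrative numbers): with totInvocDay = 10000, a
+ * user responsible for 500 invocations gets (500 / 10000) * 100 = 5%.
+ */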
+ /**
+ * Calculate required invocations based on the number of jobs selected by the user
+ */
+ private void calculateRequiredInvocations() {
+ userSerInvoRequired = new HashMap<String, HashMap<Integer, Integer>>();
+ for (Map.Entry<String, HashMap<Integer, Integer>> userEntry : userSerInvo.entrySet()) {
+ String user = userEntry.getKey();
+ float userPercentage = invoPercentage.get(user); //get percentage for this user
+
+ for (Map.Entry<Integer, Integer> serviceInvocation : userEntry.getValue().entrySet()) {
+ int serviceNumber = serviceInvocation.getKey();
+ int newInvocationNumber = (int) ((totalUserGenerator * userPercentage) / 100);
+ HashMap<Integer, Integer> newServiceInvocation = new HashMap<Integer, Integer>();
+ newServiceInvocation.put(serviceNumber, newInvocationNumber);
+ userSerInvoRequired.put(user, newServiceInvocation);
+ }
+ }
+ }
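+ /*
+ * Continuing the example (illustrative numbers): with
+ * totalUserGenerator = 2000 and a 5% share, the user keeps its service
+ * count and is scaled to (int) ((2000 * 5) / 100) = 100 invocations.
+ */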
+ /**
+ * Provide user id
+ *
+ * @param u original hash-id
+ * @return user id
+ */
+ private String getUser(String u) {
+ if(!users.containsKey(u)) {
+ users.put(u, "U" + userid);
+ userid++;
+ }
+ return users.get(u);
+ }
+ /**
+ * Provide executable id
+ *
+ * @param e original executable hash-id
+ * @return executable id
+ */
+ private String getExecutable(String e) {
+ if(!executable.containsKey(e)) {
+ executable.put(e, "X" + exeid);
+ exeid++;
+ }
+ return executable.get(e);
+ }
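+ /*
+ * For example (hashes shortened for illustration): the first distinct
+ * owner hash maps to "U0" and keeps that id on every later call, the
+ * next new hash to "U1"; function hashes map to "X0", "X1", ... alike.
+ */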
+ /**
+ * Extracts the collected per-user information to CSV files placed next
+ * to the input trace
+ *
+ * @param extract perform the extraction when true
+ */
+ protected void exportData(boolean extract) {
+ if(extract) {
+ try {
+ System.err.println("Total users' tasks file is extracted to " + path.getParent() + "/UsersInformation.csv");
+ RandomAccessFile raf = new RandomAccessFile(path.getParent() + "/" + "UsersInformation.csv", "rw");
+ raf.writeBytes("User ID" + "," + "Number of Services" + "," + "Number of Invocations" + "\n");
+ for (Map.Entry<String, HashMap<Integer, Integer>> userDetails : userSerInvo.entrySet()) {
+ for (Map.Entry<Integer, Integer> t : userDetails.getValue().entrySet()) {
+ raf.writeBytes(getUser(userDetails.getKey()) + "," + t.getKey() + "," + t.getValue() + "\n");
+ }//end-for
+ }//end-for
+ raf.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }//end-try
+ try {
+ System.err.println("Users' invocations and services required file is extracted to " + path.getParent() + "/UsersServicesInvocationsRequired.csv");
+ RandomAccessFile raf = new RandomAccessFile(path.getParent() + "/" + "UsersServicesInvocationsRequired.csv", "rw");
+ raf.writeBytes("User ID" + "," + "Number of required Services" + "," + "Number of required Invocations" + "\n");
+ for (Map.Entry<String, HashMap<Integer, Integer>> userDetails : userSerInvoRequired.entrySet()) {
+ for (Map.Entry<Integer, Integer> t : userDetails.getValue().entrySet()) {
+ raf.writeBytes(getUser(userDetails.getKey()) + "," + t.getKey() + "," + t.getValue() + "\n");
+ }
+ }
+ raf.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }//end-if
+ }//end-exportData
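+ /*
+ * Example row in UsersInformation.csv (illustrative values):
+ * U0,3,50 -- user U0 owns 3 services with 50 invocations in total.
+ */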
+ /**
+ * Convert generated tasks to SWF format
+ */
+ protected void generateSWFFormat() {
+ try {
+ Collections.sort(currentlyOffered, JobListAnalyser.submitTimeComparator);
+ System.err.println("SWF format trace is extracted to "+ path.getParent()+"\\Trace.swf");
+ RandomAccessFile raf = new RandomAccessFile(path.getParent() +"/"+"Trace.swf", "rw");
+ //Header info
+ raf.writeBytes(";\n");
+ raf.writeBytes(";\t Serverless workload generator by Dilshad H. Sallo and Gabor Kecskemeti \n");
+ raf.writeBytes(";\t Generated Date: " + java.time.LocalDate.now() + " \n");
+ raf.writeBytes(";\t Standard Workload Format \n");
+ raf.writeBytes(";\n");
+ raf.writeBytes(";\t1:\t Job Number\n;\t2:\t Submit Time\n;\t3:\t Wait Time\n;\t4:\t Run Time\n;\t5:\t Number of Allocated Processors\n;\t6:\t CPU Used\n"
+ + ";\t7:\t Used Memory\n;\t8:\t Processors Required\n;\t9:\t Requested Time\n;\t10:\t Requested Memory\n;\t11:\t Status\n;\t12:\t User ID\n;\t13:\t Group ID\n"
+ + ";\t14:\t Executable Number\n;\t15:\t Queue Number\n;\t16:\t Partition Number\n;\t17:\t Preceding Job Number\n;\t18:\t Think Time\n;\n");
+ for (int i = 0; i < currentlyOffered.size(); i++) {
+ final Job job = currentlyOffered.get(i);
+ final double runTime = job.getExectimeSecs()/1000D;
+ String user = job.user.replace("U", "");
+ String exe = job.executable.replace("X", "");
+ raf.writeBytes("\t" + job.getId() //1 job id
+ + " \t " + job.getSubmittimeSecs() //2 submit time
+ + " \t " + job.getQueuetimeSecs() //3 wait time
+ + " \t " + Double.toString(runTime) //4 run time
+ + " \t " + job.nprocs //5 number of allocated processors
+ + " \t " + "0" //6 average CPU time used
+ + " \t " + job.usedMemory //7 used memory
+ + " \t " + "-1" //8 processors required
+ + " \t " + "-1" //9 requested time (user estimate)
+ + " \t " + "-1" //10 requested memory
+ + " \t " + "1" //11 status
+ + " \t " + Integer.parseInt(user) //12 user id
+ + " \t " + job.group.length() //13 group id
+ + " \t " + Integer.parseInt(exe) //14 executable (application) number
+ + " \t " + "1" //15 queue number
+ + " \t " + "-1" //16 partition number
+ + " \t " + "-1" //17 preceding job number
+ + "\t " + " -1" //18 think time from preceding job
+ + "\n");
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
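+ /*
+ * Example SWF row (illustrative values, fields in the order written above):
+ * 1 13 0 0.35 1 0 128 -1 -1 -1 1 0 1 0 1 -1 -1 -1
+ */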
+ /**
+ * Convert generated tasks to GWF format
+ */
+ protected void generateGWFFormat() {
+ try {
+ Collections.sort(currentlyOffered, JobListAnalyser.submitTimeComparator);
+ System.err.println("SWF format trace is extracted to "+ path.getParent()+"\\Trace.gwf");
+ RandomAccessFile raf = new RandomAccessFile(path.getParent() +"/"+"Trace.gwf", "rw");
+ //Header info
+ raf.writeBytes("#\n");
+ raf.writeBytes("#\t Serverless workload generator by Dilshad H. Sallo and Gabor Kecskemeti \n");
+ raf.writeBytes("#\t Generated Date: " + java.time.LocalDate.now() + " \n");
+ raf.writeBytes("#\t Grid Workload Format \n");
+ raf.writeBytes("#\n");
+ raf.writeBytes("# 1 JobID \n# 2 SubmitTime\t in seconds\n# 3 WaitTime\t in seconds\n# 4 RunTime\t runtime measured in wallclock seconds\n"
+ + "# 5 NProcs\tnumber of allocated processors\n# 6 AverageCPUTimeUsed\t average of CPU time over all allocated processors\n"
+ + "# 7 Used Memory\t average per processor in kilobytes\n# 8 ReqNProcs\t requested number of processors\n# 9 ReqTime\t requested time measured in wallclock seconds\n"
+ + "# 10 ReqMemory\t requested memory (average per processor)\n# 11 Status\t job completed = 1, job failed = 0,\n# "
+ + "12 UserID\t string identifier for user\n# 13 GroupID\t string identifier for group user belongs to\n# "
+ + "14 ExecutableID\t name of executable\n# 15 QueueID\t string identifier for queue\n# 16 PartitionID\t string identifier for partition\n# "
+ + "17 OrigSiteID\t string identifier for submission site\n# 18 LastRunSiteID\t string identifier for execution site\n# "
+ + "19 JobStructure\t single job = UNITARY, composite job = BoT\n# 20 JobStructureParams\t if JobStructure = BoT, contains batch identifier\n# "
+ + "21 UsedNetwork\t used network resources in kilobytes/second\n# 22 UsedLocalDiskSpace\t in megabytes\n# "
+ + "23 UsedResources\t list of comma-separated generic resources (ResourceDescription:Consumption)\n# "
+ + "24 ReqPlatform\t CPUArchitecture,OS,OSVersion\n# 25 ReqNetwork\t in kilobytes/second\n# 26 ReqLocalDiskSpace\t in megabytes\n"
+ + "# 27 ReqResources\t list of comma-separated generic resources (ResourceDescription:Consumption)\n# "
+ + "28 VOID\t identifier for Virtual Organization\n# 29 ProjectID\t identifier for project\n#\n#(fields contain -1 if not available)\n");
+ raf.writeBytes("#\n");
+ raf.writeBytes("#\n");
+ //TODO: check these fields
+ for (int i = 0; i < currentlyOffered.size(); i++) {
+ final Job job = currentlyOffered.get(i);
+ final double runTime = job.getExectimeSecs()/1000D;
+ raf.writeBytes(job.getId()
+ + " \t " + job.getSubmittimeSecs()
+ + " \t " + job.getQueuetimeSecs()
+ + " \t " + Double.toString(runTime)
+ + " \t " + job.nprocs
+ + " \t " + job.perProcCPUTime
+ + " \t " + job.usedMemory
+ + " \t " + job.nprocs
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "1"
+ + " \t " + job.user
+ + " \t " + job.group
+ + " \t " + job.executable
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "UNITARY"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1"
+ + " \t " + "-1" + "\n");
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }//endGWFile
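+ /*
+ * Example GWF row start (illustrative values): JobID, SubmitTime,
+ * WaitTime and RunTime come first; fields that are unavailable stay -1
+ * and JobStructure is always UNITARY here.
+ */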
+ /**
+ * Convert generated task to AWS Lambda format
+ */
+ protected void generateAWSTrace() {
+ try {
+ System.err.println("AWS trace is extracted to "+ path.getParent()+"\\AWSTrace.csv");
+ RandomAccessFile raf = new RandomAccessFile(path.getParent() +"/"+ "AWSTrace.csv", "rw");
+ raf.writeBytes("" + "," + "is_cold" + "," + "cpu_info" + "," + "inst_id" + "," + "inst_priv_ip" + "," + "new_id" + "," +
+ "exist_id" + "," +"uptime" + "," + "vm_id" + "," + "vm_priv_ip" + "," + "vm_pub_ip" + "," + "start_time" + "," +
+ "end_time" + "," + "elapsed_time" + "," + "aws_duration" + "," + "aws_billed_duration" + "," + "aws_max_mem" + "," +
+ "io_speed" + "," + "client_start_time" + "," + "client_end_time" + "," + "client_elapsed_time" + "," + "\n");
+
+ Collections.sort(currentlyOffered, JobListAnalyser.submitTimeComparator);
+ for (int i = 0; i