Commit c2b7ba8e authored by gustavorag

Initial commit.

parent 1b226237
fogbow_request=Glue2RAM >= 1024
fogbow_request_image=fogbow-ubuntu
fogbow_request_public_key={your_public_key}
infra_fogbow_manager_url=http://localhost:8182
infra_fogbow_token_path_file=/yourCertFolder
infra_fogbow_private_key_path=/yourPrivateKeyFolder
infra_fogbow_username=fogbow
env_upload_folder_path=/pathOfFileToUploadTest
env_download_folder_path=/pathToFileDownloadTest
env_remote_folder_path=/tmp/fogbowTesterEnv/
# Must be a file inside env_upload_folder_path
env_scp_file_name=fileToDownloadUploadTest
env_local_command_interpreter=/bin/bash
statistic_datastore_url=jdbc:h2:file:/{your_data_base_file_path}/statistic
request_datastore_url=jdbc:h2:file:/{your_data_base_file_path}/requests
rest_server_port =
#In seconds
test_connection_timeout = 180
execution_configuration = /{your_fogbow-exerciser-env_path}/ExecutionConfigurations.json
json_file_folder_output=/{your_fogbow-exerciser-env_path}/result/json
result_file_folder_output=/{your_fogbow-exerciser-env_path}/result
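#
# The file referenced by execution_configuration is parsed by FogbowExerciserMain (below) with
# Gson into an ExecutionConfiguration[] array. A minimal sketch of its shape; the field names are
# inferred from the getters used in ExecutionManager (getLocal, getNumberOfResources,
# getExecutionsPerResource, isExecuteUpload, isExecuteDownload) and are assumptions, since the
# ExecutionConfiguration class itself is not part of this commit:
#
# [
#   {
#     "local": "mylocation",
#     "numberOfResources": 2,
#     "executionsPerResource": 3,
#     "executeUpload": true,
#     "executeDownload": true
#   }
# ]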
# Root logger option
log4j.rootLogger=DEBUG, stdout
# Direct log messages to stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} -[%x] %-5p %c{1}:%L - %m%n
<project xmlns="http://maven.apache.org/POM/4.0.0"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ReverseTunnelTester</groupId>
<artifactId>ReverseTunnelTester</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>ReverseTunnelTester</name>
<description>TunnelReverseTester</description>
<repositories>
<repository>
<id>maven-restlet</id>
<name>Restlet repository</name>
<url>http://maven.restlet.com</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.4</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.8.4</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<dependency>
<groupId>org.fogbowcloud</groupId>
<artifactId>manager</artifactId>
<version>0.0.1-SNAPSHOT</version>
<exclusions>
<exclusion>
<groupId>org.opennebula</groupId>
<artifactId>client</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20090211</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.2.4</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
<version>4.5</version>
</dependency>
<dependency>
<groupId>org.restlet.jse</groupId>
<artifactId>org.restlet</artifactId>
<version>2.3.4</version>
</dependency>
<dependency>
<groupId>org.restlet.jse</groupId>
<artifactId>org.restlet.ext.jackson</artifactId>
<version>2.3.4</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.3-1102-jdbc41</version>
</dependency>
<dependency>
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>1.8.2</version>
</dependency>
</dependencies>
</project>
package org.fogbowcloud.statistic.tester.core;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.log4j.Logger;
import org.fogbowcloud.statistic.tester.core.util.AppConfiguration;
import org.fogbowcloud.statistic.tester.core.util.AppUtils;
import org.fogbowcloud.statistic.tester.core.util.DateUtils;
import org.fogbowcloud.statistic.tester.infrastructure.FogbowInfrastructureProvider;
import org.fogbowcloud.statistic.tester.infrastructure.InfrastructureProvider;
import org.fogbowcloud.statistic.tester.model.ExecutionConfiguration;
import org.fogbowcloud.statistic.tester.model.Resource;
import org.fogbowcloud.statistic.tester.model.Resource.ResourceState;
import org.fogbowcloud.statistic.tester.model.Statistic;
import org.fogbowcloud.statistic.tester.model.Statistic.StatisticType;
import org.fogbowcloud.statistic.tester.service.StatisticService;
public class ExecutionManager {
private static final Logger LOGGER = Logger.getLogger(ExecutionManager.class);
private ExecutionConfiguration execConfig;
private Properties properties;
private List<Statistic> statistics = new ArrayList<Statistic>();
private List<Resource> resources = new ArrayList<Resource>();
private int askedResources = 0;
private int resolvedResources = 0;
private int completedResourceTests = 0;
private ExecutorService askForResourcesExecutor = Executors.newCachedThreadPool();
private DateUtils dateUtils = new DateUtils();
private InfrastructureProvider infraProvider;
private StatisticService statsService;
private ReentrantReadWriteLock statisticsLock = new ReentrantReadWriteLock();
private ReentrantReadWriteLock globalStatisticsLock = new ReentrantReadWriteLock();
private Statistic globalExecution = null;
public ExecutionManager(Properties properties, StatisticService statsService, ExecutionConfiguration execConfig)
throws Exception {
this.properties = properties;
this.statsService = statsService;
this.execConfig = execConfig;
this.infraProvider = new FogbowInfrastructureProvider(properties);
}
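/**
 * Drives one full test run: requests resources asynchronously until the configured number is
 * resolved, then keeps scheduling tests on IDLE resources until every resource has completed,
 * flushing accumulated statistics to the StatisticService between iterations and recording the
 * global end time before shutting the executor down.
 */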
public void execute() throws InterruptedException {
while (completedResourceTests < execConfig.getNumberOfResources()) {
LOGGER.debug("Executing ExecutionManager cicle.");
if (globalExecution == null) {
initGlobalStatistics();
}
resolveResources();
if (resources.size() > 0 && resolvedResources == execConfig.getNumberOfResources()) {
while (completedResourceTests < execConfig.getNumberOfResources()) {
for (Resource r : new ArrayList<Resource>(resources)) {
if (ResourceState.IDLE.equals(r.getState())) {
executeResourceTest(r);
}
}
Thread.sleep(2000);
}
}
try {
statisticsLock.writeLock().lock();
if (!statistics.isEmpty()) {
// Add new statistics generated in this loop.
statsService.saveStatistics(statistics);
statistics = new ArrayList<Statistic>();
}
} finally {
statisticsLock.writeLock().unlock();
}
try {
globalStatisticsLock.writeLock().lock();
statsService.updateStatistic(globalExecution);
} finally {
globalStatisticsLock.writeLock().unlock();
}
Thread.sleep(1000);
}
try {
statisticsLock.writeLock().lock();
if (!statistics.isEmpty()) {
// Flush any statistics left over after the main loop.
statsService.saveStatistics(statistics);
statistics = new ArrayList<Statistic>();
}
} finally {
statisticsLock.writeLock().unlock();
}
LOGGER.debug("Resources tests execution completed");
globalExecution.putInformation(Statistic.INFO_END_TIME, String.valueOf(dateUtils.currentTimeMillis()));
statsService.updateStatistic(globalExecution);
// Wait for all threads
askForResourcesExecutor.shutdown();
while (!askForResourcesExecutor.isTerminated()) {
Thread.sleep(1000);
}
}
private void initGlobalStatistics() {
globalExecution = new Statistic(StatisticType.GLOBAL_EXECUTION,
System.getProperty(AppConfiguration.TEST_EXECUTION_ID));
globalExecution.putInformation(Statistic.INFO_START_TIME, String.valueOf(dateUtils.currentTimeMillis()));
globalExecution.putInformation(Statistic.INFO_GLOBAL_FAILED_RESOURCES, "0");
globalExecution.putInformation(Statistic.INFO_GLOBAL_SUCCESS_RESOURCES, "0");
globalExecution.putInformation(Statistic.INFO_GLOBAL_NUMBER_OF_RESOURCES,
String.valueOf(execConfig.getNumberOfResources()));
globalExecution.putInformation(Statistic.INFO_GLOBAL_UPLOAD_TEST, String.valueOf(execConfig.isExecuteUpload()));
globalExecution.putInformation(Statistic.INFO_GLOBAL_DOWNLOAD_TEST,
String.valueOf(execConfig.isExecuteDownload()));
globalExecution.putInformation(Statistic.INFO_GLOBAL_EXECUTIONS_PER_RESOURCE,
String.valueOf(execConfig.getExecutionsPerResource()));
statsService.saveStatistic(globalExecution);
}
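/**
 * Submits one asynchronous request per resource still missing. Each worker records a
 * REQUEST_RESOURCE statistic; on success the new resource is added as IDLE, and on failure the
 * resource is counted as both resolved and completed so that execute() can still terminate.
 */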
private void resolveResources() {
LOGGER.debug("Resolving resources requests.");
while (askedResources < execConfig.getNumberOfResources()) {
askedResources++;
askForResourcesExecutor.submit(new Runnable() {
public void run() {
Statistic stats = new Statistic(StatisticType.REQUEST_RESOURCE, globalExecution.getExecutionId());
try {
stats.putInformation(Statistic.INFO_START_TIME, String.valueOf(dateUtils.currentTimeMillis()));
String fogbowRequirements = properties.getProperty(AppConfiguration.FOGBOW_REQUEST)
.replace(AppConfiguration.LOCAL_TAG, execConfig.getLocal());
Resource r = infraProvider.requestResource(fogbowRequirements);
r.putMetadata(Resource.METADATA_CREATE_TIME, String.valueOf(dateUtils.currentTimeMillis()));
r.setState(ResourceState.IDLE);
LOGGER.debug("Adding new Resource");
resources.add(r);
resolvedResources++;
stats.setResourceId(r.getId());
copyMetadatasFromResource(stats, r);
} catch (Throwable e) {
stats.putInformation(Statistic.INFO_ERROR, e.getMessage());
completedResourceTests++;
resolvedResources++;
} finally {
stats.putInformation(Statistic.INFO_END_TIME, String.valueOf(dateUtils.currentTimeMillis()));
try {
statisticsLock.writeLock().lock();
statistics.add(stats);
} finally {
statisticsLock.writeLock().unlock();
}
}
}
});
}
}
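/**
 * Runs one test iteration for an IDLE resource on a worker thread: verifies the SSH connection,
 * optionally runs the upload and download tests, and retires the resource (deleting it and
 * updating the global success/failure counters) once it has failed or reached the configured
 * number of executions per resource.
 */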
private void executeResourceTest(final Resource resource) {
resource.setState(ResourceState.RUNNING);
askForResourcesExecutor.submit(new Runnable() {
public void run() {
LOGGER.debug("Init execute Resource test - ResourceID = "
+ resource.getId());
Integer executions;
String actualExecutions = resource.getMetadataValue(Resource.METADATA_USE_NUMBER);
if (actualExecutions == null || actualExecutions.isEmpty() || !AppUtils.isNumeric(actualExecutions)) {
executions = Integer.valueOf(0);
} else {
executions = Integer.valueOf(actualExecutions);
}
if (executions.intValue() < execConfig.getExecutionsPerResource()) {
executions++;
LOGGER.debug("Executing Resource test - Execution [" + executions + "] - ResourceID = "
+ resource.getId());
resource.putMetadata(Resource.METADATA_USE_NUMBER, String.valueOf(executions));
checkConnection(resource);
if (ResourceState.FAILED.equals(resource.getState())) {
LOGGER.error("Resource test completed with fail: ResourceID = " + resource.getId());
deleteResource(resource);
resources.remove(resource);
try {
globalStatisticsLock.writeLock().lock();
incrementStatisticMetadata(globalExecution, Statistic.INFO_GLOBAL_FAILED_RESOURCES);
} finally {
globalStatisticsLock.writeLock().unlock();
}
completedResourceTests++;
return;
}
if (execConfig.isExecuteUpload()) {
testUpload(resource);
}
if (execConfig.isExecuteDownload()) {
testDownload(resource);
}
resource.setState(ResourceState.IDLE);
LOGGER.debug("Finishing Resource test: ResourceID = " + resource.getId());
} else {
LOGGER.debug("Resource test completed: ResourceID = " + resource.getId());
deleteResource(resource);
resources.remove(resource);
try {
globalStatisticsLock.writeLock().lock();
incrementStatisticMetadata(globalExecution, Statistic.INFO_GLOBAL_SUCCESS_RESOURCES);
} finally {
globalStatisticsLock.writeLock().unlock();
}
completedResourceTests++;
}
}
});
}
private void testDownload(final Resource resource) {
LOGGER.debug("Executing Resource Download test: ResourceID = " + resource.getId());
Statistic downloadStatistic = new Statistic(StatisticType.FILE_DOWNLOAD, globalExecution.getExecutionId());
downloadStatistic.putInformation(Statistic.INFO_START_TIME, String.valueOf(dateUtils.currentTimeMillis()));
try {
resource.executeScpDownload();
} catch (Exception e) {
LOGGER.error("Error while execute Resource Download test: ResourceID = " + resource.getId(), e);
downloadStatistic.putInformation(Statistic.INFO_ERROR, e.getMessage());
}
downloadStatistic.putInformation(Statistic.INFO_END_TIME, String.valueOf(dateUtils.currentTimeMillis()));
copyMetadatasFromResource(downloadStatistic, resource);
try {
statisticsLock.writeLock().lock();
statistics.add(downloadStatistic);
} finally {
statisticsLock.writeLock().unlock();
}
LOGGER.debug("Finishing Execution Download test: ResourceID = " + resource.getId());
}
private void testUpload(final Resource resource) {
LOGGER.debug("Executing Resource Upload test: ResourceID = " + resource.getId());
Statistic upLoadStatistic = new Statistic(StatisticType.FILE_UPLOAD, globalExecution.getExecutionId());
upLoadStatistic.putInformation(Statistic.INFO_START_TIME, String.valueOf(dateUtils.currentTimeMillis()));
try {
resource.executeScpUpload();
} catch (Exception e) {
LOGGER.error("Error while execute Resource Upload test: ResourceID = " + resource.getId(), e);
upLoadStatistic.putInformation(Statistic.INFO_ERROR, e.getMessage());
}
upLoadStatistic.putInformation(Statistic.INFO_END_TIME, String.valueOf(dateUtils.currentTimeMillis()));
copyMetadatasFromResource(upLoadStatistic, resource);
try {
statisticsLock.writeLock().lock();
statistics.add(upLoadStatistic);
} finally {
statisticsLock.writeLock().unlock();
}
LOGGER.debug("Finishing Execution Upload test: ResourceID = " + resource.getId());
}
private void deleteResource(final Resource resource) {
LOGGER.debug("Executing Delete Resource: ResourceID = " + resource.getId());
Statistic deleteResourceStatistic = new Statistic(StatisticType.DELETE_RESOURCE,
globalExecution.getExecutionId());
deleteResourceStatistic.putInformation(Statistic.INFO_START_TIME,
String.valueOf(dateUtils.currentTimeMillis()));
try {
infraProvider.deleteResource(resource.getId());
} catch (Exception e) {
deleteResourceStatistic.putInformation(Statistic.INFO_ERROR, e.getMessage());
}
deleteResourceStatistic.putInformation(Statistic.INFO_END_TIME, String.valueOf(dateUtils.currentTimeMillis()));
copyMetadatasFromResource(deleteResourceStatistic, resource);
try {
statisticsLock.writeLock().lock();
statistics.add(deleteResourceStatistic);
} finally {
statisticsLock.writeLock().unlock();
}
LOGGER.debug("Finishing Resource Delete: ResourceID = " + resource.getId());
}
private void checkConnection(final Resource resource) {
LOGGER.debug("Executing Resource Connection test: ResourceID = " + resource.getId());
Statistic testConnStatistic = new Statistic(StatisticType.TEST_CONNECTION, globalExecution.getExecutionId());
testConnStatistic.putInformation(Statistic.INFO_START_TIME, String.valueOf(dateUtils.currentTimeMillis()));
testConnStatistic.setResourceId(resource.getId());
try {
resource.testSshConnection();
} catch (Exception e) {
testConnStatistic.putInformation(Statistic.INFO_ERROR, e.getMessage());
resource.setState(ResourceState.FAILED);
resource.putMetadata(Resource.METADATA_FAIL_TIME, String.valueOf(dateUtils.currentTimeMillis()));
} finally {
testConnStatistic.putInformation(Statistic.INFO_END_TIME, String.valueOf(dateUtils.currentTimeMillis()));
copyMetadatasFromResource(testConnStatistic, resource);
try {
statisticsLock.writeLock().lock();
statistics.add(testConnStatistic);
} finally {
statisticsLock.writeLock().unlock();
}
}
LOGGER.debug("Finishing Execution Connection test: ResourceID = " + resource.getId());
}
private void copyMetadatasFromResource(Statistic statistic, Resource resource) {
statistic.setResourceId(resource.getId());
// statistic.putInformation(Statistic.INFO_RESOURCE_ID,
// resource.getId());
statistic.putInformation(Statistic.INFO_RESOURCE_CONN_INFO,
resource.getMetadataValue(Resource.ENV_HOST) + ":" + resource.getMetadataValue(Resource.ENV_SSH_PORT));
statistic.putInformation(Statistic.INFO_RESOURCE_CREATION_TIME,
resource.getMetadataValue(Resource.METADATA_CREATE_TIME));
statistic.putInformation(Statistic.INFO_RESOURCE_FAIL_TIME,
resource.getMetadataValue(Resource.METADATA_FAIL_TIME));
statistic.putInformation(Statistic.INFO_RESOURCE_EXECUTION_NUMBER,
resource.getMetadataValue(Resource.METADATA_USE_NUMBER));
statistic.putInformation(Statistic.INFO_RESOURCE_UPLOAD_FILE_SIZE,
resource.getMetadataValue(Resource.METADATA_UPLOAD_FILE_SIZE));
statistic.putInformation(Statistic.INFO_RESOURCE_DOWNLOAD_FILE_SIZE,
resource.getMetadataValue(Resource.METADATA_DOWNLOAD_FILE_SIZE));
statistic.putInformation(Statistic.INFO_RESOURCE_UPLOAD_RESULT,
resource.getMetadataValue(Resource.METADATA_UPLOAD_RESULT));
// Assumes Resource.METADATA_DOWNLOAD_RESULT exists, mirroring METADATA_UPLOAD_RESULT above.
statistic.putInformation(Statistic.INFO_RESOURCE_DOWNLOAD_RESULT,
resource.getMetadataValue(Resource.METADATA_DOWNLOAD_RESULT));
}
private void incrementStatisticMetadata(Statistic statistic, String metaData) {
Integer increment;
String actualValue = statistic.getInformation(metaData);
if (actualValue == null || actualValue.isEmpty() || !AppUtils.isNumeric(actualValue)) {
increment = Integer.valueOf(0);
} else {
increment = Integer.valueOf(Integer.parseInt(actualValue) + 1);
}
statistic.putInformation(metaData, String.valueOf(increment));
}
public Statistic getGlobalExecution() {
return globalExecution;
}
}
package org.fogbowcloud.statistic.tester.core;
import java.io.FileInputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
import org.apache.commons.io.IOUtils;
import org.fogbowcloud.statistic.tester.core.util.AppConfiguration;
import org.fogbowcloud.statistic.tester.model.ExecutionConfiguration;
import org.fogbowcloud.statistic.tester.model.Statistic;
import org.fogbowcloud.statistic.tester.service.StatisticService;
import com.google.gson.Gson;
public class FogbowExerciserMain {
private static List<ExecutionConfiguration> executionsConfigs;
private static Properties properties;
public static void main(String[] args) throws Exception {
properties = new Properties();
FileInputStream input = new FileInputStream(args[0]);
properties.load(input);
StatisticGenerator sg = new StatisticGenerator();
generateExecutionConfig();
for(ExecutionConfiguration executionConfig : executionsConfigs){
System.setProperty(AppConfiguration.TEST_EXECUTION_ID, generateExecutionId());
StatisticService statsService = new StatisticService(properties);
ExecutionManager em = new ExecutionManager(properties, statsService, executionConfig);
em.execute();
Statistic currentGlobalStatistic = em.getGlobalExecution();
List<Statistic> currentExecutionStatistics = statsService
.getStatisticsByExecutionId(currentGlobalStatistic.getExecutionId());
System.out.println(sg.defaultExecutionStatistic(currentGlobalStatistic, currentExecutionStatistics, properties, executionConfig));
}
}
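// Usage sketch: main() only requires a single argument pointing at the properties file shown at
// the top of this commit (the file name and classpath below are illustrative assumptions):
//
//   java -cp <assembled-classpath> org.fogbowcloud.statistic.tester.core.FogbowExerciserMain \
//       /path/to/exerciser.properties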
private static void generateExecutionConfig() throws Exception {
Gson gson = new Gson();
FileInputStream fis = new FileInputStream((String) properties.get(AppConfiguration.EXECUTION_CONFIGURATION));
String configs = IOUtils.toString(fis);
executionsConfigs = Arrays.asList(gson.fromJson(configs, ExecutionConfiguration[].class));
}