ENV OTF_MONGO_HOSTS=localhost:27017\r
ENV OTF_MONGO_REPLICASET=rs0\r
ENV OTF_MONGO_DATABASE=otf\r
-ENV OTF_CAMUNDA_DB_URL=localhost:3306/camunda\r
-ENV OTF_CAMUNDA_DB_USERNAME=root\r
+ENV OTF_CAMUNDA_DB_URL=localhost:3306/otf-camunda\r
+ENV OTF_CAMUNDA_DB_USERNAME=username\r
ENV OTF_CAMUNDA_DB_PASSWORD=password\r
ENV AAF_PERM_TYPE=type\r
ENV CADI_HOSTNAME=localhost\r
</dependency>\r
<dependency>\r
<groupId>org.camunda.bpm.springboot</groupId>\r
- <artifactId>camunda-bpm-spring-boot-starter-webapp</artifactId>\r
+ <artifactId>camunda-bpm-spring-boot-starter-webapp-ee</artifactId>\r
<version>${camunda.springboot.version}</version>\r
</dependency>\r
<dependency>\r
\r
<cadi.version>2.1.10</cadi.version>\r
<docker.registry>registry.hub.docker.io</docker.registry>\r
- <camunda.version>7.10.0</camunda.version>\r
+ <camunda.version>7.10.0-ee</camunda.version>\r
<camunda.bpm.assert.version>2.0-alpha2</camunda.bpm.assert.version>\r
<camunda.bpm.base.version>7.10.0</camunda.bpm.base.version>\r
<camunda.bpm.mail.version>1.1.0</camunda.bpm.mail.version>\r
<camunda.bpm.reactor.version>2.1.2</camunda.bpm.reactor.version>\r
- <camunda.bpm.version>7.10.4</camunda.bpm.version>\r
+ <camunda.bpm.version>7.10.4-ee</camunda.bpm.version>\r
<camunda.bpm.external-task-client.version>1.1.1</camunda.bpm.external-task-client.version>\r
<camunda.mockito.version>3.2.1</camunda.mockito.version>\r
<camunda.spin.version>1.6.6</camunda.spin.version>\r
// WorkflowTask.printWorkflowTaskResources();\r
}\r
\r
- private void saveTestHeadResults(String businessKey) {\r
+ private void saveTestHeadResults(String businessKey, String groupId) {\r
Query query = new Query();\r
+ //TODO: Update needs to be changed to work with Azure\r
+ query.addCriteria(Criteria.where("groupId").is(groupId));\r
query.addCriteria(Criteria.where("businessKey").is(businessKey));\r
Update update = new Update();\r
update.set("testHeadResults", testHeadResults);\r
\r
package org.oran.otf.camunda.delegate.otf.common;\r
\r
+import com.mongodb.client.result.UpdateResult;\r
import org.oran.otf.camunda.exception.TestExecutionException;\r
import org.oran.otf.camunda.model.ExecutionConstants;\r
import org.oran.otf.camunda.workflow.utility.WorkflowUtility;\r
import org.oran.otf.common.model.TestExecution;\r
import org.oran.otf.common.repository.TestExecutionRepository;\r
import org.oran.otf.common.utility.Utility;\r
-import com.mongodb.client.result.UpdateResult;\r
\r
import java.util.Arrays;\r
import java.util.Date;\r
// processBusinessKey from the delegate execution because it is saved to the database before the\r
// user can modify the value.\r
Query query = new Query();\r
+ //TODO: Update query needs to be changed for Azure\r
+ query.addCriteria((Criteria.where("groupId").is(testExecution.getGroupId())));\r
query.addCriteria(Criteria.where("businessKey").is(execution.getProcessBusinessKey()));\r
Update update = new Update();\r
update.set("testResult", testExecution.getTestResult());\r
import org.springframework.stereotype.Component;\r
\r
@Component\r
+@Conditional(value= FilterCondition.class)\r
public class PostResultsToDMaaPDelegate implements JavaDelegate {\r
\r
private static Logger logger = LoggerFactory.getLogger(PostResultsToDMaaPDelegate.class);\r
// Add the testExecution to the parentTestExecution\r
parentTestExecution.getTestInstanceResults().add(testExecution);\r
Query query = new Query();\r
+ //TODO: Update for Azure\r
+ query.addCriteria((Criteria.where("groupId").is(parentTestExecution.getGroupId())));\r
query.addCriteria(Criteria.where("_id").is(parentTestExecution.get_id()));\r
// Also add businessKey as a criteria because the object won't be found if the business key\r
// was somehow modified in the workflow.\r
// Add the testExecution to the parentTestExecution\r
parentTestExecution.getTestInstanceResults().add(testExecution);\r
Query query = new Query();\r
+ //TODO: Update for Azure\r
+ query.addCriteria((Criteria.where("groupId").is(parentTestExecution.getGroupId())));\r
query.addCriteria(Criteria.where("_id").is(parentTestExecution.get_id()));\r
// Also add businessKey as a criteria because the object won't be found if the business key\r
// was somehow modified in the workflow.\r
\r
private void saveResult(TestExecution testExecution) {\r
Query query = new Query();\r
+ //TODO: Update for Azure\r
+ query.addCriteria((Criteria.where("groupId").is(testExecution.getGroupId())));\r
query.addCriteria(Criteria.where("_id").is(testExecution.get_id()));\r
// Also add businessKey as a criteria because the object won't be found if the business key\r
// was somehow modified in the workflow.\r
import org.oran.otf.camunda.model.ExecutionConstants;\r
import org.oran.otf.camunda.workflow.utility.WorkflowUtility;\r
import org.oran.otf.common.model.TestExecution;\r
+import org.oran.otf.common.repository.TestExecutionRepository;\r
import org.oran.otf.common.utility.Utility;\r
import com.google.gson.JsonObject;\r
import com.mongodb.client.result.UpdateResult;\r
testExecution.setTestResult(result);\r
testExecution.setProcessInstanceId(execution.getProcessInstanceId());\r
\r
-\r
Query query = new Query();\r
+ //TODO: Update needs new query for Azure\r
+ query.addCriteria((Criteria.where("groupId").is(testExecution.getGroupId())));\r
query.addCriteria(Criteria.where("businessKey").is(execution.getProcessBusinessKey()));\r
Update update = new Update();\r
update.set("testResult", testExecution.getTestResult());\r
// available.\r
testExecution.setProcessInstanceId(processInstance.getProcessInstanceId());\r
Query query = new Query();\r
+ //TODO: Update for Azure\r
+ query.addCriteria((Criteria.where("groupId").is(testExecution.getGroupId())));\r
query.addCriteria(Criteria.where("_id").is(testExecution.get_id()));\r
// Also add businessKey as a criteria because the object won't be found if the business key\r
// was somehow modified in the workflow.\r
// available.\r
testExecution.setProcessInstanceId(processInstance.getProcessInstanceId());\r
Query query = new Query();\r
+ //TODO: Update for Azure\r
+ query.addCriteria((Criteria.where("groupId").is(testExecution.getGroupId())));\r
query.addCriteria(Criteria.where("_id").is(testExecution.get_id()));\r
// Also add businessKey as a criteria because the object won't be found if the business key\r
// was somehow modified in the workflow.\r
testExecution.setTestResult(testResult);\r
testExecution.setTestResultMessage(testResultMessage);\r
Query query = new Query();\r
+ query.addCriteria(Criteria.where("groupId").is(testExecution.getGroupId()));\r
query.addCriteria(Criteria.where("_id").is(testExecution.get_id()));\r
// Also add businessKey as a criteria because the object won't be found if the business key\r
// was somehow modified in the workflow.\r
testExecution.setTestResult(testResult);\r
testExecution.setTestResultMessage(testResultMessage);\r
Query query = new Query();\r
+ query.addCriteria(Criteria.where("groupId").is(testExecution.getGroupId()));\r
query.addCriteria(Criteria.where("_id").is(testExecution.get_id()));\r
// Also add businessKey as a criteria because the object won't be found if the business key\r
// was somehow modified in the workflow.\r
\r
package org.oran.otf.common.model;\r
\r
+import javax.validation.constraints.NotNull;\r
import org.oran.otf.common.utility.gson.Convert;\r
import java.io.Serializable;\r
import java.util.Date;\r
\r
import org.bson.types.ObjectId;\r
import org.springframework.data.annotation.Id;\r
+import org.springframework.data.mongodb.core.index.CompoundIndex;\r
import org.springframework.data.mongodb.core.index.Indexed;\r
import org.springframework.data.mongodb.core.mapping.Document;\r
\r
@Id\r
private ObjectId _id;\r
\r
- @Indexed(unique = true)\r
+ //@Indexed(unique = true)\r
+ @NotNull\r
private String testHeadName;\r
\r
private String testHeadDescription;\r
\r
package org.oran.otf.common.model;\r
\r
+import javax.ws.rs.PATCH;\r
import org.oran.otf.common.model.local.ParallelFlowInput;\r
import org.oran.otf.common.utility.gson.Convert;\r
import java.io.Serializable;\r
\r
for(Group group : groupsToCheck)\r
{\r
- if(group.getParentGroupId() != null) // if there is a parent\r
+ if(group != null && group.getParentGroupId() != null) // if there is a parent\r
{\r
String parentId = group.getParentGroupId().toString();\r
Group parentGroup = groupMap.get(parentId);\r
MongoCredential.createScramSha1Credential(username, database, password.toCharArray());\r
\r
MongoClientOptions options =\r
- MongoClientOptions.builder().sslEnabled(false).requiredReplicaSetName(replicaSet).build();\r
+ // MongoClientOptions.builder().sslEnabled(false).requiredReplicaSetName(replicaSet).build();\r
+ MongoClientOptions.builder().sslEnabled(true).build();\r
\r
String[] hostArray = hosts.split(",");\r
ArrayList<ServerAddress> hosts = new ArrayList<>();\r
username: ${OTF_CAMUNDA_DB_USERNAME}\r
password: ${OTF_CAMUNDA_DB_PASSWORD}\r
cadi:\r
- enabled: true\r
+ enabled: false\r
aaf-mech-id: ${AAF_ID}\r
aaf-mech-password: ${AAF_MECH_PASSWORD}\r
aaf-perm-type: ${AAF_PERM_TYPE}\r
port.http: 8000\r
tomcat.max-threads: 800\r
# ssl:\r
- key-store-type: 'PKCS12'\r
- key-store: ${OTF_CERT_PATH}\r
- key-store-password: ${OTF_CERT_PASS}\r
+ #key-store-type: 'PKCS12'\r
+ #key-store: ${OTF_CERT_PATH}\r
+ #key-store-password: ${OTF_CERT_PASS}\r
security:\r
- https-only: true\r
+ https-only: false\r
require-ssl: false\r
server.port: 8443\r
server.port.http: 8080\r
ENV CAMUNDAAPI_URL=https://localhost:31313/\r
ENV CAMUNDAAPI_AAFID=username\r
ENV CAMUNDAAPI_AAFPASSWORD=password\r
+ENV AZURE_STORAGE_ACCOUNT=otffiles\r
+ENV AZURE_STORAGE_CONTAINER=files\r
+ENV AZURE_STORAGE_KEY=key123\r
+ENV MONGO_CONNECTION_STRING=connection_string\r
ENV MONGO_BASEURL=localhost:27017/\r
ENV MONGO_DBOTF=otf\r
ENV MONGO_REPLICASET=rs0\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
// Karma configuration file, see link for more information\r
// https://karma-runner.github.io/1.0/config/configuration-file.html\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
// Protractor configuration file, see link for more information\r
// https://github.com/angular/protractor/blob/master/lib/config.ts\r
\r
//Get the xml of the default bpmn file\r
async getDefaultFlow() {\r
return new Promise((resolve, reject) => {\r
- this._fileTransfer.get('5d0a5357e6624a3ef0d16164').subscribe(\r
- data => {\r
- let bpmn = new Buffer(data as Buffer);\r
- resolve(bpmn.toString());\r
- },\r
- err => {\r
- this.errorPopup(err.toString());\r
- reject(err);\r
- }\r
- );\r
+ resolve("<?xml version=\"1.0\" encoding=\"UTF-8\"?\>\<bpmn:definitions targetNamespace=\"http:\/\/bpmn.io\/schema\/bpmn\" \>\<bpmn:process id=\"\" isExecutable=\"true\"\>\<bpmn:startEvent id=\"StartEvent_1\" /\>\</bpmn:process\>\<bpmndi:BPMNDiagram id=\"BPMNDiagram_1\"\>\<bpmndi:BPMNPlane id=\"BPMNPlane_1\" bpmnElement=\"Process_1ai7kus\"\>\<bpmndi:BPMNShape id=\"_BPMNShape_StartEvent_2\" bpmnElement=\"StartEvent_1\"\>\<dc:Bounds x=\"179\" y=\"159\" width=\"36\" height=\"36\" /\>\</bpmndi:BPMNShape\>\</bpmndi:BPMNPlane\>\</bpmndi:BPMNDiagram\>\</bpmn:definitions\>")\r
+ // this._fileTransfer.get('5d0a5357e6624a3ef0d16164').subscribe(\r
+ // data => {\r
+ // let bpmn = new Buffer(data as Buffer);\r
+ // resolve(bpmn.toString());\r
+ // },\r
+ // err => {\r
+ // this.errorPopup(err.toString());\r
+ // reject(err);\r
+ // }\r
+ // );\r
});\r
}\r
\r
this._fileTransfer.get(this.ptd.currentInstance.bpmnFileId).subscribe(\r
result => {\r
let bpmn = new Buffer(result as Buffer);\r
+ console.log(bpmn.toString())\r
resolve(bpmn.toString());\r
},\r
err => {\r
if (mappedArr.hasOwnProperty(_id)) {\r
mappedElem = mappedArr[_id];\r
// If the element is not at the root level, add it to its parent array of children.\r
- if (mappedElem.parentGroupId) {\r
+ if (mappedElem.parentGroupId && mappedArr[mappedElem['parentGroupId']]) {\r
mappedArr[mappedElem['parentGroupId']]['children'].push(mappedElem);\r
}\r
// If the element is at the root level, add it to first level elements array.\r
"@angular/platform-browser": "^7.0.2",\r
"@angular/platform-browser-dynamic": "^7.0.2",\r
"@angular/router": "^6.1.7",\r
+ "@azure/storage-blob": "^10.5.0",\r
"@casl/ability": "^3.1.2",\r
"@casl/angular": "^2.1.0",\r
"@casl/mongoose": "^2.3.1",\r
"express-rate-limit": "^3.3.2",\r
"feathers-authentication-management": "^2.0.1",\r
"feathers-hooks-common": "^4.17.14",\r
- "feathers-mongoose": "^6.2.0",\r
+ "feathers-mongoose": "^8.1.0",\r
"feathers-permissions": "^0.2.1",\r
"file-saver": "^2.0.1",\r
"font-awesome": "^4.7.0",\r
"mat-progress-buttons": "^7.0.10",\r
"material-design-icons": "^3.0.1",\r
"moment": "^2.22.2",\r
- "mongoose": "^5.6.4",\r
- "mongoose-gridfs": "^0.5.0",\r
+ "mongoose": "^5.7.1",\r
+ "mongoose-gridfs": "^1.2.10",\r
"multer": "^1.4.1",\r
"ng-cli-pug-loader": "^0.1.7",\r
"ng2-codemirror": "^1.1.3",\r
"aafId": "CAMUNDAAPI_AAFID",\r
"aafPassword": "CAMUNDAAPI_AAFPASSWORD"\r
},\r
+ "azure": {\r
+ "storage": {\r
+ "account": "AZURE_STORAGE_ACCOUNT",\r
+ "key": "AZURE_STORAGE_KEY",\r
+ "container": "AZURE_STORAGE_CONTAINER"\r
+ }\r
+ },\r
"mongo": {\r
"baseUrl": "MONGO_BASEURL",\r
"dbOtf": "MONGO_DBOTF",\r
"replicaSet": "MONGO_REPLICASET",\r
"username": "MONGO_USERNAME",\r
- "password": "MONGO_PASSWORD"\r
+ "password": "MONGO_PASSWORD",\r
+ "connectionString": "MONGO_CONNECTION_STRING"\r
},\r
"otf": {\r
"url" : "OTF_URL",\r
"host": "0.0.0.0",\r
"path": "otf/api/",\r
"base-path": "otf/api/v1/",\r
- "port": 80,\r
+ "port": 8080,\r
"ssl": false,\r
"public": "../../../client/dist/",\r
"paginate": {\r
const jobTypes = ['test-execution-job'];\r
const agenda = new Agenda({\r
db: {\r
- address: 'mongodb://' + mongoData.username + ':' + mongoData.password + '@' + mongoData.baseUrl + mongoData.dbOtf + '?replicaSet=' + mongoData.replicaSet,\r
+ address: mongoData.connectionString,\r
collection: 'agenda'\r
+ },\r
+ sort: {\r
+ nextRunAt: 1\r
}\r
});\r
\r
const updatedBy = require('./hooks/updatedBy');\r
const {iff, disallow, isProvider, skipRemainingHooks} = require('feathers-hooks-common');\r
const { ObjectID } = require('mongodb');\r
+const shardKey = require('./hooks/insertShardKey.js');\r
\r
module.exports = {\r
before: {\r
- all: [paginateOption(), skipRemainingHooks(context => !context.params.provider)],\r
+ all: [shardKey(), paginateOption(), skipRemainingHooks(context => !context.params.provider)],\r
find: [\r
function(context){\r
const {query} = context.params;\r
\r
const util = require('../../lib/otf-util');\r
const request = require('request');\r
+const errors = require('@feathersjs/errors');\r
module.exports = function (options = {}) { // eslint-disable-line no-unused-vars\r
return async context => {\r
let options = {\r
}).then(result => {\r
\r
}).catch(err => {\r
- console.log(err);\r
+ throw new errors.GeneralError(err.body.message);\r
});\r
};\r
};\r
return Promise.reject(context.error);\r
}\r
}).catch(err => {\r
- \r
+ throw new errors.GeneralError(err.body.message);\r
});\r
}\r
});\r
module.exports.groupFilter = function (options = null) {\r
return async context => {\r
\r
+ if (!context.params.provider) {\r
+ return Promise.resolve(context);\r
+ }\r
\r
switch(context.method){\r
case 'get':\r
context.app.services[context.app.get('base-path') + 'groups'].Model.aggregate([\r
{\r
$match: context.params.query\r
- },\r
- {\r
- $graphLookup: {\r
- from: "groups",\r
- startWith: "$parentGroupId",\r
- connectFromField: "parentGroupId",\r
- connectToField: "_id",\r
- as: "parentGroups"\r
+ }\r
+ ]).then(async res => {\r
+ if(res.length){\r
+ for(let i = 0; i < res.length; i++){\r
+ res[i]['parentGroups'] = await getParentGroups(context.app.services[context.app.get('base-path') + 'groups'].Model, res[i]);\r
}\r
}\r
- ]).then(res => {\r
resolve(res);\r
}).catch(err => {\r
throw new errors.GeneralError(err);\r
};\r
};\r
\r
+getParentGroups = async function(model, group){\r
+ return new Promise(async (resolve, reject) => {\r
+ let parentGroups = [];\r
+ if(group.parentGroupId){\r
+ model.aggregate([\r
+ {\r
+ $match: {\r
+ '_id': group.parentGroupId\r
+ }\r
+ }\r
+ ]).then(async res => {\r
+ if(res[0] && res[0].parentGroupId){\r
+ parentGroups.unshift(res[0]);\r
+ let parents = await getParentGroups(model, res[0]);\r
+ parents.forEach(e => {\r
+ parentGroups.unshift(e);\r
+ });\r
+ }\r
+ resolve(parentGroups);\r
+ }).catch(err => {\r
+ reject(err);\r
+ })\r
+ }else{\r
+ resolve();\r
+ }\r
+ });\r
+ \r
+}\r
+\r
getChildGroups = async function(model, group){\r
return new Promise(async (resolve, reject) => {\r
let childGroups = [];\r
--- /dev/null
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
+module.exports = function (service) { \r
+ return async context => {\r
+ // If the method is find, get, or create, return\r
+ if(context.method == 'find' || context.method == 'get' || context.method == 'create'){\r
+ return context;\r
+ }\r
+\r
+ // If the id or service does not exist, return\r
+ if(!context.id || !context.service){\r
+ return context;\r
+ }\r
+\r
+ let serviceString;\r
+ if(service){\r
+ serviceString = context.app.get('base-path') + service;\r
+ }else{\r
+ serviceString = context.path;\r
+ }\r
+\r
+ if(!context.app.services[serviceString].Model){\r
+ return context;\r
+ }\r
+\r
+ // If the entity data hasnt been set, get and set it\r
+ if(!context.params.entityData){\r
+ context.params.entityData = await context.app.services[serviceString].get(context.id, { provider: undefined});\r
+ }\r
+\r
+ // Find the shard key from the model\r
+ let shardKeys = {};\r
+ Object.keys(context.app.services[serviceString].Model.schema.options.shardKey).forEach(key => {\r
+ shardKeys[key] = context.params.entityData[key];\r
+ })\r
+\r
+ // Add the shard keys to the query\r
+ Object.assign(context.params.query, shardKeys);\r
+\r
+ return context;\r
+\r
+ }\r
+}
\ No newline at end of file
return context;\r
}\r
\r
- const params = Object.assign({}, context.params, { provider: null });\r
+ //see if the entity has already been pulled and stored, else get it\r
+ if(!context.params.entityData){\r
+ const params = Object.assign({}, context.params, { provider: null });\r
+ context.params.entityData = await service.get(context.id, params);\r
+ }\r
\r
- const result = await service.get(context.id, params);\r
- throwUnlessCan(action, result);\r
+ throwUnlessCan(action, context.params.entityData);\r
\r
if (action === 'get') {\r
- context.result = pick(result, allowedFields);\r
+ context.result = pick(context.params.entityData, allowedFields);\r
}else{\r
if(context.data){\r
Object.keys(context.data).forEach(key => {\r
if(key == "$push"){\r
Object.keys(context.data['$push']).forEach(k => {\r
- throwUnlessCan(action, result, k);\r
+ throwUnlessCan(action, context.params.entityData, k);\r
});\r
}else{\r
- throwUnlessCan(action, result, key);\r
+ throwUnlessCan(action, context.params.entityData, key);\r
}\r
})\r
}\r
const channels = require('./channels');\r
const authentication = require('./authentication');\r
\r
+// Azure Storage\r
+const azureStorage = require('../lib/azure-storage');\r
+\r
// Mongoose\r
const mongoose = require('../lib/mongoose');\r
const _mongoose = require('mongoose');\r
// });\r
// });\r
\r
+// Configure Azure storage\r
+app.configure(azureStorage);\r
+\r
// Configure Mongoose driver before setting up services that use Mongoose\r
app.configure(mongoose);\r
-\r
// Set up database dependent components once the connection is ready to prevent unexpected results\r
_mongoose.connection.on('open', (ref) => {\r
app.configure(authentication);\r
##############################################################################*/\r
\r
\r
-const mongooseGridFS = require('mongoose-gridfs');\r
-\r
module.exports = function (app) {\r
- const mongoose = app.get('mongooseClient');\r
-\r
- const gridfs = mongooseGridFS({\r
- collection: 'fs',\r
- model: 'File',\r
- mongooseConnection: mongoose.connection\r
- });\r
+ const mongooseClient = app.get('mongooseClient');\r
+ const { Schema } = mongooseClient;\r
+ const files = new Schema({//Esquema base de los usuarios.\r
+ length: {\r
+ type: Number\r
+ },\r
+ chunkSize: {\r
+ type: Number\r
+ },\r
+ uploadDate: {\r
+ type: Date\r
+ },\r
+ md5: {\r
+ type: String\r
+ },\r
+ filename: {\r
+ type: String\r
+ },\r
+ contentType: {\r
+ type: String\r
+ },\r
+ metadata: {\r
+ type: Object\r
+ },\r
+ path:{\r
+ type:String,\r
+ readonly:true\r
+ }\r
+ },{collection:`fs.files`, shardKey: { filename: 1 }});\r
+ \r
+ return mongooseClient.model('files', files);\r
\r
- return gridfs.model;\r
};\r
ownerId: { type: Schema.Types.ObjectId, ref: 'users', required: true },\r
mechanizedIds: [String]\r
}, {\r
+ shardKey: { parentGroupId: 1},\r
timestamps: true\r
});\r
\r
lockedAt: { type: String },\r
lastRunAt: { type: String }\r
}, {\r
+ shardKey: { 'data.testSchedule._testInstanceId': 1 },\r
timestamps: true\r
});\r
\r
updatedBy: { type: Schema.Types.ObjectId, ref: 'users'},\r
createdBy: { type: Schema.Types.ObjectId, ref: 'users'}\r
}, {\r
- timestamps: true,\r
- minimize: false\r
+ shardKey: { groupId: 1 },\r
+ timestamps: true,\r
+ minimize: false\r
});\r
\r
\r
historicTestDefinition: { type: Object }\r
\r
}, {\r
+ shardKey: { groupId: 1 },\r
timestamps: false\r
});\r
\r
authorizationEnabled: { type: Boolean, default: false },\r
isPublic: { type: Boolean }\r
}, {\r
+ shardKey: { groupId: 1 },\r
timestamps: true\r
});\r
\r
updatedBy: { type: Schema.Types.ObjectId, ref: 'users' },\r
createdBy: { type: Schema.Types.ObjectId, ref: 'users' }\r
}, {\r
+ shardKey: { groupId: 1 },\r
timestamps: true,\r
minimize: false\r
});\r
testDefinitions: [{type: Schema.Types.ObjectId, ref: 'testDefinitions'}]\r
}, { _id: false})\r
}, {\r
+ shardKey: { email: 1 },\r
timestamps: true\r
});\r
\r
// console.log(err);\r
// }\r
// Set as deployed\r
- delete params.query;\r
+ params.query = {};\r
\r
//check to see if the process definition Key was set\r
// if (!data.testDefinition.processDefinitionKey) {\r
}\r
};\r
let options = {\r
- url: this.options.app.get('otf').url + this.options.app.get('base-path') + 'file-transfer',\r
+ uri: this.options.app.get('otf').url + this.options.app.get('base-path') + 'file-transfer',\r
headers: {\r
'Authorization': params.headers.Authorization,\r
'Content-Type': "multipart/form-data"\r
before: {\r
all: [authenticate('jwt'), permissions('execute')],\r
find: [ throwError(new errors.MethodNotAllowed()) ],\r
- get: [ throwError(new errors.MethodNotAllowed())],\r
+ get: [ ],\r
create: [\r
(context) => {\r
context.data.executorId = context.params.user._id;\r
\r
const Response = require('http-response-object');\r
const Readable = require('stream').Readable;\r
-const mongooseGridFS = require('mongoose-gridfs');\r
+const { createModel } = require('mongoose-gridfs');\r
const AdmZip = require('adm-zip');\r
const errors = require('@feathersjs/errors');\r
+const mongoose = require('mongoose');\r
+const ObjectID = require('mongodb').ObjectID;\r
+const {\r
+ Aborter,\r
+ BlockBlobURL,\r
+ BlobURL,\r
+ downloadBlobToBuffer,\r
+ uploadStreamToBlockBlob\r
+ } = require("@azure/storage-blob");\r
+\r
\r
class Service {\r
constructor (options) {\r
this.options = options || {};\r
- this.mongoose = this.options.app.get('mongooseClient');\r
- this.gridfs = mongooseGridFS({\r
- collection: 'fs',\r
- model: 'File',\r
- mongooseConnection: this.mongoose.connection\r
- });\r
- this.FileModel = this.gridfs.model;\r
+ // this.File = createModel({\r
+ // collection: 'fs',\r
+ // model: 'File',\r
+ // mongooseConnection: mongoose.connection\r
+ // });\r
}\r
\r
async find (params) {\r
}\r
\r
async get (id, params) {\r
- let content = await this.callReadFile(id).then(res => {\r
- return res;\r
- });\r
-\r
- if(params.query && params.query.robot){\r
- content = await this.createRobotResponse(content);\r
+ if(!id){\r
+ throw new errors.BadRequest("File id is required");\r
}\r
- return content;\r
+\r
+ // Get Blob url\r
+ const blob = BlobURL.fromContainerURL(this.options.app.get('azureStorageContainerUrl'), id);\r
+ const stats = await blob.getProperties().catch(err => {\r
+ throw new errors.NotFound();\r
+ });\r
+ // const content = await blob.download(Aborter.none, 0);\r
+ const buffer = Buffer.alloc(stats.contentLength);\r
+ await downloadBlobToBuffer(\r
+ Aborter.timeout(30 * 60 * 1000),\r
+ buffer,\r
+ blob,\r
+ 0,\r
+ undefined,\r
+ {\r
+ blockSize: 4 * 1024 * 1024, // 4MB block size\r
+ parallelism: 20, // 20 concurrency\r
+ }\r
+ );\r
+\r
+ return buffer;\r
}\r
\r
async create (data, params) {\r
throw new BadRequest("No files found to upload")\r
}\r
\r
- let promises = [];\r
-\r
+ let promises = [];\r
+ \r
files.forEach(file => {\r
- let promise = new Promise( (resolve, reject) => {\r
-\r
- let stream = new Readable();\r
- stream.push(file.buffer);\r
- stream.push(null);\r
-\r
- this.FileModel.write(\r
- {\r
- filename: file.originalname,\r
- contentType: file.mimeType\r
- },\r
- stream,\r
- function (error, savedAttachment) {\r
- if (error) {\r
- logger.error(error);\r
- reject(error);\r
- } else {\r
- stream.destroy();\r
- resolve(savedAttachment);\r
- }\r
- }\r
- );\r
+ let promise = new Promise(async (resolve, reject) => {\r
+\r
+ let exists, filename, blob, blockBlob;\r
+ // Creates the file id and checks that there isn't already a file with that name\r
+ do {\r
+\r
+ filename = ObjectID().toString();\r
+ \r
+ blob = BlobURL.fromContainerURL(this.options.app.get('azureStorageContainerUrl'), filename);\r
+ blockBlob = BlockBlobURL.fromBlobURL(blob);\r
+ exists = await blockBlob.getProperties().catch(err => {\r
+ if(err.statusCode == 404){\r
+ exists = false;\r
+ }\r
+ });\r
+\r
+ } while (exists);\r
+ \r
+ blockBlob.upload(Aborter.none, file.buffer.toString(), file.size).then(\r
+ result => {\r
+ result._id = filename;\r
+ resolve(result);\r
+ }\r
+ ).catch(\r
+ error => {\r
+ reject(error);\r
+ }\r
+ );\r
\r
})\r
\r
return result;\r
}\r
\r
+ \r
+\r
async update (id, data, params) {\r
return new Response(200, {});\r
}\r
}\r
\r
async remove (id, params) {\r
- let err = await this.callUnlinkFile(id).then(err => {\r
- return err;\r
- });\r
+ // let err = await this.callUnlinkFile(id).then(err => {\r
+ // return err;\r
+ // });\r
\r
- if(err){\r
- throw errors.GeneralError(err);\r
- } \r
+ // if(err){\r
+ // throw errors.GeneralError(err);\r
+ // } \r
\r
return new Response(200, {});\r
}\r
(context) => { console.log("AFTER PERMISSIONS")},\r
canExecute(), \r
async (context) => {\r
- const fullUrl = this.options.app.get('otf').url + context.app.get('base-path') + 'schedule-test';\r
+ const fullUrl = context.app.get('otf').url + context.app.get('base-path') + 'schedule-test';\r
\r
context.data.executorId = context.params.user._id;\r
\r
permissions('jobs'),\r
canExecute(),\r
async function (context) {\r
- const fullUrl = this.options.app.get('otf').url + context.app.get('base-path') + 'cancel-test';\r
+ const fullUrl = context.app.get('otf').url + context.app.get('base-path') + 'cancel-test';\r
\r
if (context.id == null || context.params.user._id == null ||\r
utils.isValidObjectId(context.id) || utils.isValidObjectId(context.params.user._id)) {\r
paginate\r
};\r
\r
- const mongoConfig = app.get('mongo');\r
- const rateLimitConfig = app.get('rate-limit');\r
-\r
- const createUserLimiter = new RateLimit({\r
- store: new MongoStore({\r
- uri: 'mongodb://' + mongoConfig.username + ':' + mongoConfig.password + '@' + mongoConfig.baseUrl +\r
- mongoConfig.dbOtf + '?replicaSet=' + mongoConfig.replicaSet,\r
- collectionName: rateLimitConfig.mongoStore.collection\r
- }),\r
- max: app.get('rate-limit').services.users.max,\r
- windowsMs: app.get('rate-limit').services.users.windowMs,\r
- message: app.get('rate-limit').services.users.message\r
- });\r
-\r
// Initialize our service with any options it requires,\r
// and limit any POST methods.\r
- app.use(app.get('base-path') + 'users', (req, res, next) => {\r
- if (req.method === 'POST') {\r
- createUserLimiter(req, res, next);\r
- } else {\r
- next();\r
- }\r
- }, createService(options));\r
+ app.use(app.get('base-path') + 'users', createService(options));\r
\r
// Get our initialized service so that we can register hooks\r
const service = app.service(app.get('base-path') + 'users');\r
--- /dev/null
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
+const {\r
+ ServiceURL,\r
+ StorageURL,\r
+ SharedKeyCredential,\r
+ TokenCredential,\r
+ ContainerURL\r
+ } = require("@azure/storage-blob");\r
+\r
+module.exports = function(app) {\r
+ // Enter your storage account name and shared key\r
+ const account = app.get('azure').storage.account;\r
+ const accountKey = app.get('azure').storage.key;\r
+ const container = app.get('azure').storage.container;\r
+ \r
+ // Use SharedKeyCredential with storage account and account key\r
+ const sharedKeyCredential = new SharedKeyCredential(account, accountKey);\r
+ \r
+ // Use TokenCredential with OAuth token\r
+ const tokenCredential = new TokenCredential("token");\r
+ tokenCredential.token = "renewedToken"; // Renew the token by updating token field of token credential\r
+ \r
+ // Use sharedKeyCredential, tokenCredential or anonymousCredential to create a pipeline\r
+ const pipeline = StorageURL.newPipeline(sharedKeyCredential);\r
+ \r
+ // List containers\r
+ const serviceURL = new ServiceURL(\r
+ // When using AnonymousCredential, following url should include a valid SAS or support public access\r
+ `https://${account}.blob.core.windows.net`,\r
+ pipeline\r
+ );\r
+\r
+ const containerURL = ContainerURL.fromServiceURL(serviceURL, container);\r
+\r
+ app.set('azureStorageContainerUrl', containerURL);\r
+}
\ No newline at end of file
\r
module.exports = function (app) {\r
const mongoData = app.get('mongo');\r
- const connectionString = 'mongodb://' + mongoData.username + ':' + mongoData.password + '@' + mongoData.baseUrl + mongoData.dbOtf + '?replicaSet=' + mongoData.replicaSet;\r
\r
- mongoose.connect(connectionString, { useNewUrlParser: true }).then(null, error => {\r
+ mongoose.connect(mongoData.connectionString, { useNewUrlParser: true, useFindAndModify: false, useUnifiedTopology: true, useCreateIndex: true }).then(null, error => {\r
console.log('caught', error.message);\r
});\r
mongoose.Promise = global.Promise;\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const rp = require('request-promise');\r
const url = require('url');\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const feathers = require('@feathersjs/feathers');\r
const groupFilter = require('../../src/hooks/group-filter');\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
+/* Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+##############################################################################*/\r
+\r
+\r
const assert = require('assert');\r
const app = require('../../src/app');\r
\r
<dependency>\r
<artifactId>httpmime</artifactId>\r
<groupId>org.apache.httpcomponents</groupId>\r
- <version>4.5.7</version>\r
+ <version>4.5.7-SNAPSHOT</version>\r
</dependency>\r
\r
<dependency>\r
MongoCredential.createScramSha1Credential(username, database, password.toCharArray());\r
\r
MongoClientOptions options =\r
- MongoClientOptions.builder().sslEnabled(false).requiredReplicaSetName(replicaSet).build();\r
+ MongoClientOptions.builder().sslEnabled(true).build();\r
\r
String[] hostArray = hosts.split(",");\r
ArrayList<ServerAddress> hosts = new ArrayList<>();\r
}\r
\r
private boolean isOtfMechanizedIdentifier(String email) {\r
- return email.equalsIgnoreCase("email@localhost")\r
- || email.equalsIgnoreCase("email@localhost")\r
- || email.equalsIgnoreCase("email@localhost")\r
- || email.equalsIgnoreCase("email@localhost")\r
- || email.equalsIgnoreCase("email@localhost");\r
+ return email.equalsIgnoreCase(System.getenv("AAF_ID"));\r
}\r
\r
private BpmnInstance findBpmnInstance(TestDefinition testDefinition, int version, boolean latest)\r
// logger.error(Utilities.getStackTrace(e));\r
// }\r
\r
- // If a test definition id is supplied, the request intends to update an existing test\r
+ // If a test definition id is supplied, the request intends to update an existing test\r
// definition.\r
if (request.getTestDefinitionId() != null) {\r
// Check if the test definition exists in the database.\r
\r
private boolean isAuthorized(String authorization) {\r
User user = Utilities.findUserByAuthHeader(authorization, userRepository);\r
- return (user.getEmail().equalsIgnoreCase("email@localhost")\r
- || user.getEmail().equalsIgnoreCase("email@localhost"));\r
+ return (user.getEmail().equalsIgnoreCase(System.getenv("AAF_ID")));\r
}\r
\r
private DeployTestStrategyRequest mapToDeployTestStrategyRequest(String body) {\r
}\r
\r
private Response updateTestHeadFields(TestHead testHead, TestHead newTestHead, User user) {\r
- Query select = Query.query(Criteria.where("_id").is(testHead.get_id()));\r
+ Query select = Query.query(Criteria.where("_id").is(testHead.get_id())).addCriteria(Criteria.where("groupId").is(testHead.getGroupId()));\r
Update update = new Update();\r
\r
if (newTestHead.getTestHeadName() != null) {\r
@Id\r
private ObjectId _id;\r
\r
- @Indexed(unique = true)\r
+ //@Indexed(unique = true)\r
private String testHeadName;\r
\r
private String testHeadDescription;\r
public static void saveTestResult(\r
MongoTemplate mongoOperation, TestExecution execution, String testResult) {\r
Query query = new Query();\r
+ query.addCriteria(Criteria.where("groupId").is(execution.getGroupId()));\r
query.addCriteria(Criteria.where("businessKey").is(execution.getBusinessKey()));\r
Update update = new Update();\r
update.set("testResult", testResult);\r
\r
for(Group group : groupsToCheck)\r
{\r
- if(group.getParentGroupId() != null) // if there is a parent\r
+ if(group != null && group.getParentGroupId() != null) // if there is a parent\r
{\r
String parentId = group.getParentGroupId().toString();\r
Group parentGroup = groupMap.get(parentId);\r
server.port.http=8080\r
security.require-ssl=false\r
\r
-server.ssl.key-store-type=PKCS12\r
-server.ssl.key-store=${OTF_CERT_PATH}\r
-server.ssl.key-store-password=${OTF_CERT_PASS}\r
+#server.ssl.key-store-type=PKCS12\r
+#server.ssl.key-store=${OTF_CERT_PATH}\r
+#server.ssl.key-store-password=${OTF_CERT_PASS}\r
#server.servlet.context-path=/otf/api\r
#spring.jersey.application-path=/otf\r
#springfox.documentation.swagger.v2.path=/otf/api/swagger.json\r
\r
spring.resources.add-mappings=true\r
\r
-ssl.flag =${https-only.flag:true}\r
+ssl.flag =false\r
#springfox.documentation.auto-startup=false\r
#springfox.documentation.swagger.v2.path=/otf/swagger.json\r
\r
#config\r
-aaf.enabled=true\r
+aaf.enabled=false\r
aaf.call-timeout=10000\r
aaf.conn-timeout=6000\r
aaf.default-realm=localhost\r
user = new User();\r
user.setFirstName("Mech");\r
user.setLastName("Id");\r
- user.setEmail("email@localhost");\r
+ user.setEmail(System.getenv("AAF_ID"));\r
mongoTemplate.save(user, "users");\r
user = mongoTemplate.findOne(userQuery, User.class);\r
}\r