Use env vars in front and back end projects
Change-Id: Id093976032f079c62efd9fcaaf0617e05ceef3b0
parent: ac6e1c1300, commit: 40d9f11047
@ -2,7 +2,7 @@ NEBULOUS_BROKER_URL=158.37.63.86
 NEBULOUS_BROKER_PORT=31609
 NEBULOUS_BROKER_USERNAME=admin
 NEBULOUS_BROKER_PASSWORD=admin
-POSTGRES_DB_HOST=localhost
+POSTGRES_DB_HOST=db
 POSTGRES_DB_NAME=fog_broker
 POSTGRES_DB_PORT=5432
 POSTGRES_DB_USER=dbuser
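A minimal sketch (illustrative helper code, assumed rather than taken from this diff) of how either project can consume these variables once python-dotenv has loaded the .env / .env.prod file:

import os
from dotenv import load_dotenv

load_dotenv()  # picks up the .env file from the working directory

db_settings = {
    "host": os.getenv("POSTGRES_DB_HOST", "db"),
    "port": int(os.getenv("POSTGRES_DB_PORT", "5432")),
    "dbname": os.getenv("POSTGRES_DB_NAME", "fog_broker"),
    "user": os.getenv("POSTGRES_DB_USER", "dbuser"),
}
broker_host = os.getenv("NEBULOUS_BROKER_URL")
broker_port = int(os.getenv("NEBULOUS_BROKER_PORT", "31609"))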
@ -18,10 +18,10 @@ def perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_nam
# The first category is for the all-False values and the last for the all-True values
fog_node_categories = {i: [] for i in range(len(boolean_criteria) + 1)}

# Iterate over the list of fog nodes to count the '1' (True) values and assign categories
for i in range(len(node_names)):
# Iterate over the list of nodes to count the '1' (True) values and assign categories
for i in range(len(node_ids)):
true_count = sum(data_table[boolean][i] for boolean in boolean_criteria)
fog_node_categories[true_count].append(node_names[i])
fog_node_categories[true_count].append(node_ids[i])

# Remove the boolean criteria from the data_table
for boolean in boolean_criteria:
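# Illustrative run (assumed data) of the categorisation step above: nodes are grouped by
# how many of the boolean criteria they satisfy.
boolean_criteria = ["gpu", "ssd"]
data_table = {"gpu": [1, 0, 1], "ssd": [1, 0, 0]}
node_ids = ["n1", "n2", "n3"]
fog_node_categories = {i: [] for i in range(len(boolean_criteria) + 1)}
for i in range(len(node_ids)):
    true_count = sum(data_table[boolean][i] for boolean in boolean_criteria)
    fog_node_categories[true_count].append(node_ids[i])
# fog_node_categories -> {0: ['n2'], 1: ['n3'], 2: ['n1']}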
@ -41,8 +41,8 @@ def perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_nam
for fog_node_high in fog_node_categories[sorted_categories[higher_cat]]:
for fog_node_low in fog_node_categories[sorted_categories[higher_cat + 1]]:
# Create a constraint for each pair of fog nodes (high > low)
high_scores = [-data_table[criterion][node_names.index(fog_node_high)] for criterion in data_table]
low_scores = [-data_table[criterion][node_names.index(fog_node_low)] for criterion in data_table]
high_scores = [-data_table[criterion][node_ids.index(fog_node_high)] for criterion in data_table]
low_scores = [-data_table[criterion][node_ids.index(fog_node_low)] for criterion in data_table]
constraint = [h - l for h, l in zip(high_scores, low_scores)]
A_boolean.append(constraint)
b_boolean.append(0) # The score difference must be greater than 0
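# Sketch (assumed values) of one pairwise constraint row built above: the negated
# criterion scores of a higher-category node minus those of a lower-category node.
data_table = {"price": [10.0, 20.0], "ram": [8.0, 4.0]}
node_ids = ["high_node", "low_node"]
high_scores = [-data_table[c][node_ids.index("high_node")] for c in data_table]  # [-10.0, -8.0]
low_scores = [-data_table[c][node_ids.index("low_node")] for c in data_table]    # [-20.0, -4.0]
constraint = [h - l for h, l in zip(high_scores, low_scores)]                    # [10.0, -4.0]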
@ -124,8 +124,8 @@ def perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_nam
num_of_dmus = len(next(iter(data_table.values())))
Cols_No = len(criteria_list)
DEA_Scores = []
epsilon = 0.00000 # Lower bound of the variables

# epsilon = 0.000001 # Lower bound of the variables
epsilon = 0
# Iterating over each DMU to Perform DEA
for dmu_index in range(num_of_dmus):
# Gathering values for the current DMU
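# For reference (assumed data_table shape): each criterion maps to one value per DMU
# (node), so the DMU count is simply the length of any value list.
data_table = {"cores": [2, 4, 8], "ram": [4096, 8192, 16384]}
num_of_dmus = len(next(iter(data_table.values())))  # -> 3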
@ -171,7 +171,7 @@ def perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_nam
|
||||
"DEA Score": DEA_Scores[i],
|
||||
"Rank": int(DEA_Scores_Ranked[i])
|
||||
}
|
||||
for i in range(len(node_names))
|
||||
for i in range(len(node_ids))
|
||||
]
|
||||
|
||||
# Return successful results
|
||||
@ -184,9 +184,9 @@ def perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_nam
|
||||
# relative_wr_data: [{'LHSCriterion': 'Accountability', 'Operator': 1, 'Intense': 2, 'RHSCriterion': 'Compliance'}]
|
||||
# immediate_wr_data: [{'Criterion': 'Compliance', 'Operator': 1, 'Value': 0.5}]
|
||||
#
|
||||
# node_names = ['2ad4bd97-d932-42a5-860e-e607a50f161d', 'e917581d-1a62-496b-9d2e-05972fe309e9', '78aca9a8-8c14-4c7d-af34-72cef0da992d', 'd2bddce9-4118-41a9-b528-3bac32b13312']
|
||||
# node_ids = ['2ad4bd97-d932-42a5-860e-e607a50f161d', 'e917581d-1a62-496b-9d2e-05972fe309e9', '78aca9a8-8c14-4c7d-af34-72cef0da992d', 'd2bddce9-4118-41a9-b528-3bac32b13312']
|
||||
#
|
||||
# Evaluation_JSON = perform_evaluation(data_table, [], [], node_names)
|
||||
# Evaluation_JSON = perform_evaluation(data_table, [], [], node_ids)
|
||||
# pretty_json = json.dumps(Evaluation_JSON)
|
||||
|
||||
|
||||
@ -218,13 +218,13 @@ def perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_nam
|
||||
# # # "immediate_wr_data":[{"Criterion":"Accountability","Operator":1,"Value":0.2}]}
|
||||
# # # w1>=0.2 and w1<=0.5
|
||||
# #
|
||||
# node_names = ['Fog Node 1', 'Fog Node 2', 'Fog Node 3', 'Fog Node 4', 'Fog Node 5']
|
||||
# node_ids = ['Fog Node 1', 'Fog Node 2', 'Fog Node 3', 'Fog Node 4', 'Fog Node 5']
|
||||
#
|
||||
# Evaluation_JSON = perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_names)
|
||||
# Evaluation_JSON = perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_ids)
|
||||
# print("Evaluation_JSON:", Evaluation_JSON)
|
||||
|
||||
|
||||
# Evaluation_JSON = perform_evaluation(data_table, [], [], node_names)
|
||||
# Evaluation_JSON = perform_evaluation(data_table, [], [], node_ids)
|
||||
# pretty_json = json.dumps(Evaluation_JSON)
|
||||
# print(pretty_json)
|
||||
# print("Evaluation_JSON:", Evaluation_JSON)
|
||||
|
@ -1,12 +1,8 @@
import os
# import read_file
import get_data as file
import random
import json
from datetime import datetime
import data_types as attr_data_types
from Evaluation import perform_evaluation
from data_types import get_attr_data_type
import db.db_functions as db_functions

# Boolean_Variables = ['Extend offered network capacity', 'Extend offered processing capacity', 'Extend offered memory capacity',
@ -16,48 +12,93 @@ Boolean_Variables = [
"0cf00a53-fd33-4887-bb38-e0bbb04e3f3e", "d95c1dae-1e22-4fb4-9cdc-743e96d0dddc",
"8cd09fe9-c119-4ccd-b651-0f18334dbbe4", "7147995c-8e68-4106-ab24-f0a7673eb5f5", "c1c5b3c9-6178-4d67-a7e3-0285c2bf98ef"]

# Used to transform SAL's response before sending to DataGrid
|
||||
# This version is designed to read the structure of SAL's response obtained from POSTMAN
|
||||
def extract_node_candidate_data(json_file_path):
|
||||
with open(json_file_path, 'r') as file:
|
||||
json_data = json.load(file)
|
||||
# Used to extract SAL node candidate data on the user side (frontend) for the DataGrid
|
||||
def extract_SAL_node_candidate_data_Front(json_data):
|
||||
default_criteria_list = ["cores", "ram", "disk", "memoryPrice", "price"]
|
||||
|
||||
if isinstance(json_data, dict): # Single node dictionary
|
||||
json_data = [json_data] # Wrap it in a list
|
||||
|
||||
extracted_data = []
|
||||
node_ids = []
|
||||
node_names = []
|
||||
|
||||
for item in json_data:
|
||||
hardware_info = item.get("nodeCandidate", {}).get("hardware", {})
|
||||
hardware_info = item.get("hardware", {})
|
||||
# Extract default criteria values
|
||||
default_criteria_values = {criteria: hardware_info.get(criteria, 0.0) if criteria in hardware_info else item.get(criteria, 0.0) for criteria in default_criteria_list}
|
||||
|
||||
# Correctly extract the providerName from the cloud information
|
||||
cloud_info = item.get("cloud", {}) # get the cloud info or default to an empty dict
|
||||
api_info = cloud_info.get("api", {})
|
||||
provider_name = api_info.get("providerName", "Unknown Provider")
|
||||
|
||||
# each item is now a dictionary
|
||||
node_data = {
|
||||
"name": item['name'],
|
||||
"id": item['id'],
|
||||
"nodeId": item.get("nodeCandidate", {}).get("nodeId"),
|
||||
"nodeCandidateType": item.get("nodeCandidate", {}).get("nodeCandidateType"),
|
||||
"price": item.get("nodeCandidate", {}).get("price", 0.0),
|
||||
"pricePerInvocation": item.get("nodeCandidate", {}).get("pricePerInvocation", 0.0),
|
||||
"memoryPrice": item.get("nodeCandidate", {}).get("memoryPrice", 0.0),
|
||||
"hardware": {
|
||||
"id": hardware_info.get("id"),
|
||||
"name": hardware_info.get("name"),
|
||||
"providerId": hardware_info.get("providerId"),
|
||||
"cores": hardware_info.get("cores"),
|
||||
"ram": hardware_info.get("ram") * 1024 if hardware_info.get("ram") else None, # Assuming RAM needs conversion from GB to MB
|
||||
"disk": hardware_info.get("disk"),
|
||||
"fpga": hardware_info.get("fpga")
|
||||
}
|
||||
"nodeId": item.get("nodeId", ''),
|
||||
"id": item.get('id', ''),
|
||||
"nodeCandidateType": item.get("nodeCandidateType", ''),
|
||||
**default_criteria_values, # Unpack default criteria values into node_data
|
||||
"hardware": hardware_info,
|
||||
"location": item.get("location", {}),
|
||||
"image": item.get("image", {}),
|
||||
"providerName": provider_name
|
||||
}
|
||||
extracted_data.append(node_data)
|
||||
node_ids.append(item['id'])
|
||||
node_names.append(item.get('name', ''))
|
||||
node_ids.append(node_data["id"])
|
||||
|
||||
number_of_nodes = len(json_data)
|
||||
# print("Before create_node_name")
|
||||
node_names.append(create_node_name(node_data)) # call create_node_name function
|
||||
# print("After create_node_name")
|
||||
|
||||
return extracted_data, number_of_nodes, node_ids, node_names
|
||||
return extracted_data, node_ids, node_names
|
||||
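# Example call (assumed minimal SAL-style payload) for the frontend extractor above:
sample_candidate = {
    "id": "node-1",
    "nodeCandidateType": "EDGE",
    "price": 0.05,
    "hardware": {"cores": 4, "ram": 8, "disk": 100.0},
    "cloud": {"api": {"providerName": "AWS"}},
    "location": {"geoLocation": {"city": "Warsaw", "country": "Poland"}},
    "image": {"operatingSystem": {"operatingSystemFamily": "UBUNTU"}},
}
extracted, ids, names = extract_SAL_node_candidate_data_Front(sample_candidate)
# ids -> ['node-1']; names -> one human-readable label produced by create_node_name()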
|
||||
# Used to create node names for DataGrid
|
||||
def create_node_name(node_data):
|
||||
node_type = node_data.get("nodeCandidateType", "UNKNOWN_TYPE")
|
||||
|
||||
# Initialize default values
|
||||
node_city = ""
|
||||
node_country = ""
|
||||
node_os_family = "Unknown OS"
|
||||
provider_name = node_data.get("providerName", "")
|
||||
|
||||
# Only 50 nodes
|
||||
# Safely access nested properties for city and country
|
||||
location = node_data.get("location")
|
||||
if location and "geoLocation" in location and location["geoLocation"]:
|
||||
geo_location = location["geoLocation"]
|
||||
node_city = geo_location.get("city", "")
|
||||
node_country = geo_location.get("country", "")
|
||||
|
||||
image = node_data.get("image")
|
||||
if image and "operatingSystem" in image and image["operatingSystem"]:
|
||||
operating_system = image["operatingSystem"]
|
||||
node_os_family = operating_system.get("operatingSystemFamily", node_os_family)
|
||||
|
||||
cores = node_data.get("cores", "")
|
||||
ram = node_data.get("ram", "")
|
||||
|
||||
# Construct the node name with conditional inclusions
|
||||
node_name_parts = [node_type]
|
||||
if node_city and node_country:
|
||||
node_name_parts.append(f"{node_city}, {node_country}")
|
||||
|
||||
if provider_name:
|
||||
node_name_parts.append(f"Provider: {provider_name}")
|
||||
|
||||
node_name_parts.append(f"OS: {node_os_family}")
|
||||
|
||||
if cores:
|
||||
node_name_parts.append(f"Cores: {cores} ")
|
||||
if ram:
|
||||
node_name_parts.append(f"RAM: {ram} ")
|
||||
|
||||
node_name = " - ".join(part for part in node_name_parts if part) # Only include non-empty parts
|
||||
return node_name
|
||||
|
||||
# Used to extract SAL node candidate data on the app side when working with the Optimizer
|
||||
def extract_SAL_node_candidate_data(json_string):
|
||||
# print("Entered in extract_SAL_node_candidate_data")
|
||||
try:
|
||||
json_data = json.loads(json_string) # Ensure json_data is a list of dictionaries
|
||||
except json.JSONDecodeError as e:
|
||||
@ -73,9 +114,9 @@ def extract_SAL_node_candidate_data(json_string):
|
||||
"nodeId": item.get("nodeId", ''),
|
||||
"id": item.get('id', ''),
|
||||
"nodeCandidateType": item.get("nodeCandidateType", ''),
|
||||
"price": item.get("price", 0.0),
|
||||
"pricePerInvocation": item.get("pricePerInvocation", 0.0),
|
||||
"memoryPrice": item.get("memoryPrice", 0.0),
|
||||
"price": item.get("price", ''),
|
||||
"pricePerInvocation": item.get("pricePerInvocation", ''),
|
||||
"memoryPrice": item.get("memoryPrice", ''),
|
||||
"hardware": item.get("hardware", {})
|
||||
}
|
||||
extracted_data.append(node_data)
|
||||
@ -85,48 +126,11 @@ def extract_SAL_node_candidate_data(json_string):
|
||||
number_of_nodes = len(extracted_data)
|
||||
node_ids = [node['id'] for node in extracted_data]
|
||||
node_names = [node['id'] for node in extracted_data]
|
||||
|
||||
return extracted_data, number_of_nodes, node_ids, node_names
|
||||
|
||||
|
||||
|
||||
# Used to transform SAL's response all nodes
|
||||
# def extract_SAL_node_candidate_data(sal_reply):
|
||||
# # Parse the JSON string in the body of the SAL reply
|
||||
# body = sal_reply.get('body', '')
|
||||
# extracted_data = []
|
||||
#
|
||||
# try:
|
||||
# json_data = json.loads(body)
|
||||
# except json.JSONDecodeError as e:
|
||||
# print(f"Error parsing JSON: {e}")
|
||||
# return extracted_data
|
||||
#
|
||||
# for item in json_data:
|
||||
# node_data = {
|
||||
# "name": item.get('name', ''),
|
||||
# "name": item.get('id', ''),
|
||||
# "id": item.get('id', ''),
|
||||
# "nodeId": item.get("nodeId", ''),
|
||||
# "nodeCandidateType": item.get("nodeCandidateType", ''),
|
||||
# "price": item.get("price", 0.0),
|
||||
# "pricePerInvocation": item.get("pricePerInvocation", 0.0),
|
||||
# "memoryPrice": item.get("memoryPrice", 0.0),
|
||||
# "hardware": item.get("hardware", {})
|
||||
# }
|
||||
# extracted_data.append(node_data)
|
||||
#
|
||||
# number_of_nodes = len(extracted_data)
|
||||
# node_ids = [node['id'] for node in extracted_data]
|
||||
# node_names = [node['name'] for node in extracted_data]
|
||||
# if not node_names:
|
||||
# node_names = node_ids
|
||||
#
|
||||
# return extracted_data, number_of_nodes, node_ids, node_names
|
||||
return extracted_data, node_ids, node_names
|
||||
|
||||
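# Example (assumed payload) for the app-side variant above, which receives a JSON string:
sal_reply_json = json.dumps([
    {"id": "8a74-node-1", "nodeCandidateType": "IAAS", "price": 0.12,
     "hardware": {"cores": 2, "ram": 4096, "disk": 50.0}},
])
extracted, ids, names = extract_SAL_node_candidate_data(sal_reply_json)
# ids == names == ['8a74-node-1'], since this variant reuses the node ids as display names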
|
||||
# Used to map the criteria from SAL's response with the selected criteria (from frontend)
|
||||
def create_criteria_mapping(selected_items, extracted_data):
|
||||
def create_criteria_mapping():
|
||||
field_mapping = {
|
||||
# "Cost": "price",
|
||||
"Operating cost": "price",
|
||||
@ -137,7 +141,8 @@ def create_criteria_mapping(selected_items, extracted_data):
|
||||
}
|
||||
return field_mapping
|
||||
|
||||
# Used to create the required structure for the Evaluation
|
||||
|
||||
# Used to create the required structure for the Evaluation in process_evaluation_data endpoint
|
||||
def transform_grid_data_to_table(json_data):
|
||||
grid_data = json_data.get('gridData', [])
|
||||
relative_wr_data = json_data.get('relativeWRData', [])
|
||||
@ -153,8 +158,9 @@ def transform_grid_data_to_table(json_data):
|
||||
boolean_value_mapping = {"True": 1, "False": 0}
|
||||
|
||||
for node in grid_data:
|
||||
node_name = node.get('name')
|
||||
# node_name = node.get('name')
|
||||
node_ids.append(node.get('id'))
|
||||
node_id = node.get('id')
|
||||
|
||||
criteria_data = {}
|
||||
for criterion in node.get('criteria', []):
|
||||
@ -177,7 +183,7 @@ def transform_grid_data_to_table(json_data):
|
||||
# Handle or log the error for values that can't be converted to float
|
||||
pass
|
||||
|
||||
temp_data_table[node_name] = criteria_data
|
||||
temp_data_table[node_id] = criteria_data
|
||||
|
||||
# Collect all criteria titles
|
||||
criteria_titles.extend(criteria_data.keys())
|
||||
@ -189,7 +195,7 @@ def transform_grid_data_to_table(json_data):
|
||||
data_table = {title: [] for title in criteria_titles}
|
||||
|
||||
# Populate the final data table
|
||||
for node_name, criteria_data in temp_data_table.items():
|
||||
for node_id, criteria_data in temp_data_table.items():
|
||||
for title, value in criteria_data.items():
|
||||
data_table[title].append(value)
|
||||
|
||||
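# Illustrative input (assumed structure) for transform_grid_data_to_table: each gridData
# entry carries a node id plus its criteria values, which end up as one row per node in
# temp_data_table and one column per criterion in the final data_table.
json_data = {
    "gridData": [
        {"id": "node-1",
         "criteria": [{"title": "Number of CPU Cores", "value": "4"},
                      {"title": "Memory Size", "value": "8192"}]},
    ],
    "relativeWRData": [],
    "immediateWRData": [],
}
# Expected (assuming the numeric cast above succeeds):
# temp_data_table == {"node-1": {"Number of CPU Cores": 4.0, "Memory Size": 8192.0}}
# data_table      == {"Number of CPU Cores": [4.0], "Memory Size": [8192.0]}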
@ -254,64 +260,6 @@ def check_json_file_exists(app_id):
|
||||
|
||||
return os.path.exists(file_path)
|
||||
|
||||
|
||||
# Used to read ALL the saved Data for an Application
|
||||
# def read_application_data(app_id):
|
||||
# # Directory path and file path
|
||||
# app_dir = os.path.join("app_dirs", app_id)
|
||||
# file_path = os.path.join(app_dir, "data.json")
|
||||
#
|
||||
# # Check if the file exists
|
||||
# if os.path.exists(file_path):
|
||||
# # Read and parse the JSON file
|
||||
# with open(file_path, 'r', encoding='utf-8') as f:
|
||||
# data = json.load(f)
|
||||
# # Extract specific parts of the data
|
||||
# # selected_criteria = data.get("selectedCriteria", None)
|
||||
# data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = transform_grid_data_to_table(data)
|
||||
# else:
|
||||
# print(f"No data found for application ID {app_id}.") # Return everything empty
|
||||
# data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = [], [], [], [], []
|
||||
#
|
||||
# return data_table, relative_wr_data, immediate_wr_data, node_names, node_ids
|
||||
|
||||
# Used to read the saved Data of the Application ONLY for the Nodes returned by SAL
|
||||
def read_application_data(app_id, node_ids_SAL):
|
||||
# Directory path and file path
|
||||
app_dir = os.path.join("app_dirs", app_id)
|
||||
file_path = os.path.join(app_dir, f"{app_id}_data.json")
|
||||
|
||||
# Initialize variables to return in case of no data or an error
|
||||
data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = [], [], [], [], []
|
||||
|
||||
# Check if the file exists
|
||||
if os.path.exists(file_path):
|
||||
# Read and parse the JSON file
|
||||
with open(file_path, 'r', encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
|
||||
# Filter gridData based on node_ids_SAL
|
||||
filtered_grid_data = [node for node in data['gridData'] if node['id'] in node_ids_SAL]
|
||||
|
||||
# Create a new JSON structure with filtered gridData
|
||||
filtered_json_data = {
|
||||
"gridData": filtered_grid_data,
|
||||
"relativeWRData": data['relativeWRData'],
|
||||
"immediateWRData": data['immediateWRData'],
|
||||
"nodeNames": [node['name'] for node in filtered_grid_data], # Assuming you want to filter nodeNames as well
|
||||
"nodeIds": node_ids_SAL # Assuming you want to include nodeIds from the filtered list
|
||||
}
|
||||
|
||||
# Call transform_grid_data_to_table with the new filtered JSON data
|
||||
data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = transform_grid_data_to_table(filtered_json_data)
|
||||
else:
|
||||
print(f"No data found for application ID {app_id}.")
|
||||
|
||||
return data_table, relative_wr_data, immediate_wr_data, node_names
|
||||
|
||||
|
||||
|
||||
# Used to create the data table from SAL's response on the app side
|
||||
def create_data_table(selected_criteria, extracted_data, field_mapping):
|
||||
# Initialize the data table with lists for each criterion
|
||||
data_table = {criterion: [] for criterion in selected_criteria}
|
||||
@ -339,59 +287,23 @@ def create_data_table(selected_criteria, extracted_data, field_mapping):
|
||||
|
||||
return data_table
|
||||
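# Sketch (assumed inputs) of how create_data_table is driven on the app side:
# field_mapping translates the UI criterion titles into SAL field names, and the values
# are collected per criterion in node order. sal_reply_json is the assumed JSON string
# from the extractor example further up.
selected_criteria = ["Number of CPU Cores", "Memory Size"]
field_mapping = create_criteria_mapping()
extracted_data, node_ids, node_names = extract_SAL_node_candidate_data(sal_reply_json)
data_table = create_data_table(selected_criteria, extracted_data, field_mapping)
# data_table -> one list of values per selected criterion, in node order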
|
||||
|
||||
|
||||
import random
|
||||
|
||||
# def append_evaluation_results(sal_reply_body, scores_and_ranks):
|
||||
# # Check if sal_reply_body is a string and convert it to a Python object
|
||||
# if isinstance(sal_reply_body, str):
|
||||
# sal_reply_body = json.loads(sal_reply_body)
|
||||
#
|
||||
# if scores_and_ranks:
|
||||
# # Create a dictionary mapping Ids to scores and ranks
|
||||
# eval_results_dict = {result['Id']: (result['DEA Score'], result['Rank'])
|
||||
# for result in scores_and_ranks}
|
||||
#
|
||||
# # Iterate over each node in sal_reply_body and append Score and Rank
|
||||
# for node in sal_reply_body:
|
||||
# node_id = node.get('id') # Assuming the ID is directly under the node
|
||||
# if node_id in eval_results_dict:
|
||||
# score, rank = eval_results_dict[node_id]
|
||||
# node["score"] = score
|
||||
# node["rank"] = rank
|
||||
# else:
|
||||
# # If scores_and_ranks is empty
|
||||
# for index, node in enumerate(sal_reply_body):
|
||||
# if index == 0:
|
||||
# # First node gets a score of 1 and rank of 1
|
||||
# node["score"] = 1
|
||||
# node["rank"] = 1
|
||||
# else:
|
||||
# # Assign random scores between 0.33 and 0.93 to the rest
|
||||
# node["score"] = random.uniform(0.33, 0.93)
|
||||
#
|
||||
# # Sort nodes by score in descending order to calculate ranks
|
||||
# sorted_nodes = sorted(sal_reply_body[1:], key=lambda x: x["score"], reverse=True)
|
||||
#
|
||||
# # Assign ranks based on sorted order, starting from 2 since the first node is ranked 1
|
||||
# for rank, node in enumerate(sorted_nodes, start=2):
|
||||
# node["rank"] = rank
|
||||
#
|
||||
# # Combine the first node with the rest
|
||||
# sal_reply_body = [sal_reply_body[0]] + sorted_nodes
|
||||
#
|
||||
# return sal_reply_body
|
||||
|
||||
|
||||
# Used to Append "Score" and "Rank" for each node in SAL's response JSON
|
||||
def append_evaluation_results(sal_reply_body, scores_and_ranks):
|
||||
# Check if sal_reply_body is a string and convert it to a Python object
|
||||
if isinstance(sal_reply_body, str):
|
||||
sal_reply_body = json.loads(sal_reply_body)
|
||||
|
||||
# Check if there is only one node and scores_and_ranks are empty
|
||||
if len(sal_reply_body) == 1 and not scores_and_ranks:
|
||||
# Directly assign score and rank to the single node
|
||||
sal_reply_body[0]["score"] = 1
|
||||
sal_reply_body[0]["rank"] = 1
|
||||
return sal_reply_body
|
||||
|
||||
# Proceed if there are multiple nodes or scores_and_ranks is not empty
|
||||
# Create a dictionary mapping Ids to scores and ranks
|
||||
eval_results_dict = {result['Id']: (result['DEA Score'], result['Rank'])
|
||||
for result in scores_and_ranks}
|
||||
for result in scores_and_ranks if scores_and_ranks}
|
||||
|
||||
# Iterate over each node in sal_reply_body and append Score and Rank
|
||||
for node in sal_reply_body:
|
||||
@ -404,37 +316,387 @@ def append_evaluation_results(sal_reply_body, scores_and_ranks):
|
||||
return sal_reply_body
|
||||
|
||||
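# Example (assumed inputs) of attaching the evaluation output to SAL's reply:
sal_nodes = [{"id": "n1"}, {"id": "n2"}]
scores_and_ranks = [{"Id": "n1", "DEA Score": 1.0, "Rank": 1},
                    {"Id": "n2", "DEA Score": 0.7, "Rank": 2}]
enriched = append_evaluation_results(sal_nodes, scores_and_ranks)
# each node dict in enriched now also carries "score" and "rank" keys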
|
||||
def convert_value(value, criterion_info, is_matched):
|
||||
if criterion_info['type'] == 5: # Boolean type
|
||||
return 1 if value else 0
|
||||
elif criterion_info['type'] == 1: # Ordinal type
|
||||
if is_matched: # For matched nodes, use the mapping
|
||||
ordinal_value_mapping = {"High": 3, "Medium": 2, "Low": 1}
|
||||
return ordinal_value_mapping.get(value, value) # Use the value from mapping, or keep it as is if not found
|
||||
else: # For unmatched nodes, assign default value
|
||||
return 1
|
||||
return value
|
||||
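# Quick checks (assumed criterion_info dicts) of the conversion rules implemented above:
convert_value(True, {"type": 5}, is_matched=True)     # -> 1   (boolean)
convert_value("High", {"type": 1}, is_matched=True)   # -> 3   (ordinal, matched node)
convert_value("High", {"type": 1}, is_matched=False)  # -> 1   (ordinal, unmatched default)
convert_value(0.75, {"type": 2}, is_matched=True)     # -> 0.75 (numeric passes through)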
|
||||
|
||||
# Used to read the saved application data CFSB when triggered by Optimizer
|
||||
def read_application_data(app_id, sal_reply_body):
|
||||
app_dir = os.path.join("app_dirs", app_id)
|
||||
file_path = os.path.join(app_dir, f"{app_id}_data.json")
|
||||
|
||||
data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = {}, [], [], [], []
|
||||
|
||||
if isinstance(sal_reply_body, str):
|
||||
sal_reply_body = json.loads(sal_reply_body)
|
||||
|
||||
if os.path.exists(file_path):
|
||||
print(f"JSON file found for application ID {app_id}.")
|
||||
with open(file_path, 'r', encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
selected_criteria = {criterion['title']: criterion for criterion in data.get('selectedCriteria', [])}
|
||||
|
||||
# Define the default list criteria mapping
|
||||
default_list_criteria_mapping = {
|
||||
"Operating cost": "price",
|
||||
"Memory Price": "memoryPrice",
|
||||
"Number of CPU Cores": "cores",
|
||||
"Memory Size": "ram",
|
||||
"Storage Capacity": "disk"
|
||||
}
|
||||
|
||||
for criterion in selected_criteria:
|
||||
data_table[criterion] = []
|
||||
|
||||
matched_node_ids = [node['id'] for node in data.get('gridData', []) if node['id'] in [n['id'] for n in sal_reply_body]]
|
||||
unmatched_node_ids = [n['id'] for n in sal_reply_body if n['id'] not in matched_node_ids]
|
||||
|
||||
# Process MATCHED nodes
|
||||
for node in data.get('gridData', []):
|
||||
if node['id'] in matched_node_ids:
|
||||
node_ids.append(node['id'])
|
||||
# node_names.append(node.get('name', 'Unknown'))
|
||||
for crit, criterion_info in selected_criteria.items():
|
||||
value = next((criterion['value'] for criterion in node['criteria'] if criterion['title'] == crit), None)
|
||||
converted_value = convert_value(value, criterion_info, is_matched=True)
|
||||
data_table[crit].append(converted_value)
|
||||
|
||||
# Process UNMATCHED nodes
|
||||
for node_id in unmatched_node_ids:
|
||||
node_data = next((node for node in sal_reply_body if node['id'] == node_id), {})
|
||||
node_ids.append(node_id)
|
||||
for criterion, crit_info in selected_criteria.items():
|
||||
mapped_field = default_list_criteria_mapping.get(criterion, '')
|
||||
value = node_data.get(mapped_field, 0.001 if crit_info['type'] == 2 else False)
|
||||
converted_value = convert_value(value, crit_info, is_matched=False)
|
||||
data_table[criterion].append(converted_value)
|
||||
|
||||
node_names = node_ids
|
||||
relative_wr_data, immediate_wr_data = data.get('relativeWRData', []), data.get('immediateWRData', [])
|
||||
|
||||
else:  # No node id match found - proceed only with the nodes from SAL's reply
|
||||
print(f"No JSON file found for application ID {app_id}. Proceed only with data from SAL.")
|
||||
extracted_data_SAL, node_ids_SAL, node_names_SAL = extract_SAL_node_candidate_data(sal_reply_body)
|
||||
selected_criteria = ["Number of CPU Cores", "Memory Size"]
|
||||
field_mapping = create_criteria_mapping()
|
||||
data_table = create_data_table(selected_criteria, extracted_data_SAL, field_mapping)
|
||||
# Assign relativeWRData and immediateWRData regardless of node ID matches
|
||||
relative_wr_data = []
|
||||
immediate_wr_data = []
|
||||
node_ids = node_ids_SAL
|
||||
node_names = node_ids
|
||||
|
||||
return data_table, relative_wr_data, immediate_wr_data, node_names, node_ids
|
||||
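# Example call (assumed app id and reply) for the Optimizer-triggered path above; it
# expects app_dirs/<app_id>/<app_id>_data.json to exist and merges the saved grid data
# with SAL's reply, falling back to default criteria values for unmatched nodes.
sal_reply_body = json.dumps([{"id": "n1", "cores": 4, "ram": 8192, "price": 0.1, "disk": 50}])
data_table, rel_wr, imm_wr, node_names, node_ids = read_application_data(
    "d535cf554ea66fbebfc415ac837a5828", sal_reply_body)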
|
||||
|
||||
# Used to generate random values for DataGrid
|
||||
def random_value_based_on_type(data_type, criterion_info=None):
|
||||
if data_type == 1: # Ordinal
|
||||
# Assuming 'values' are part of criterion_info for ordinal types
|
||||
return random.choice(criterion_info.get('values', ["High", "Medium", "Low"]))
|
||||
elif data_type == 5: # Boolean
|
||||
return random.choice([True, False])
|
||||
else: # Numeric
|
||||
# Default case for numeric types
|
||||
return round(random.uniform(1, 100), 2)
|
||||
|
||||
|
||||
# Used to parse Patini's JSON
|
||||
def parse_device_info_from_file(file_path):
|
||||
with open(file_path, 'r') as file:
|
||||
json_data = json.load(file)
|
||||
device_names = []
|
||||
device_info = {
|
||||
'id': json_data['_id'],
|
||||
'name': json_data['name'], # Save the device name
|
||||
'deviceInfo': json_data['deviceInfo'],
|
||||
'creationDate': json_data['creationDate'],
|
||||
'lastUpdateDate': json_data['lastUpdateDate'],
|
||||
'status': json_data['status'],
|
||||
'metrics': {
|
||||
'cpu': json_data['metrics']['metrics']['cpu'],
|
||||
'uptime': json_data['metrics']['metrics']['uptime'],
|
||||
'disk': json_data['metrics']['metrics']['disk'],
|
||||
'ram': json_data['metrics']['metrics']['ram']
|
||||
}
|
||||
}
|
||||
|
||||
# Example of converting and handling ISODate strings, adjust accordingly
|
||||
device_info['creationDate'] = datetime.fromisoformat(device_info['creationDate'].replace("ISODate('", "").replace("')", ""))
|
||||
device_info['lastUpdateDate'] = datetime.fromisoformat(device_info['lastUpdateDate'].replace("ISODate('", "").replace("')", ""))
|
||||
device_info['creationDate'] = device_info['creationDate'].isoformat()
|
||||
device_info['lastUpdateDate'] = device_info['lastUpdateDate'].isoformat()
|
||||
|
||||
# Update the global device_names list
|
||||
device_names.append({'id': device_info['id'], 'name': device_info['name']})
|
||||
return device_names, device_info
|
||||
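# The ISODate handling above strips the Mongo-style wrapper before parsing; a quick
# check with an assumed input string:
raw = "ISODate('2024-01-15T13:23:40.602')"
cleaned = raw.replace("ISODate('", "").replace("')", "")
datetime.fromisoformat(cleaned).isoformat()  # -> '2024-01-15T13:23:40.602000'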
|
||||
|
||||
#---------------Read Application Data
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Example usage
|
||||
# extracted_data, NUMBER_OF_FOG_NODES, node_names = extract_node_candidate_data('dummy_data_node_candidates.json')
|
||||
# print(NUMBER_OF_FOG_NODES)
|
||||
# print(node_names)
|
||||
|
||||
# app_id = 'd535cf554ea66fbebfc415ac837a5828'
|
||||
# data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = read_app_specific_data(app_id)
|
||||
# Used to read the saved Data of the Application ONLY for the Nodes returned by SAL
|
||||
# def read_application_data(app_id, sal_reply_body):
|
||||
# # Directory path and file path
|
||||
# app_dir = os.path.join("app_dirs", app_id)
|
||||
# file_path = os.path.join(app_dir, f"{app_id}_data.json")
|
||||
#
|
||||
# print("Node Names:", node_names)
|
||||
# print("data_table:", data_table)
|
||||
# print("Relative WR Data:", relative_wr_data)
|
||||
# print("Immediate WR Data:", immediate_wr_data)
|
||||
# # Initialize variables to return in case of no data or an error
|
||||
# data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = [], [], [], [], []
|
||||
# # Read data from SAL's reply
|
||||
# extracted_data_SAL, node_ids_SAL, node_names_SAL = extract_SAL_node_candidate_data(sal_reply_body)
|
||||
#
|
||||
# evaluation_results = perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_names, node_ids)
|
||||
# print("evaluation_results:", evaluation_results)
|
||||
# # Check if the file exists
|
||||
# if os.path.exists(file_path):
|
||||
# # Read and parse the JSON file
|
||||
# with open(file_path, 'r', encoding='utf-8') as f:
|
||||
# data = json.load(f)
|
||||
#
|
||||
# # Extracting the results and saving them into a variable
|
||||
# ScoresAndRanks = evaluation_results['results']
|
||||
# print("ScoresAndRanks:", ScoresAndRanks)
|
||||
# # Filter gridData based on Nodes returned by SAL
|
||||
# filtered_grid_data = [node for node in data.get('gridData', []) if node.get('id') in node_ids_SAL]
|
||||
#
|
||||
# if filtered_grid_data: # if there's at least 1 match
|
||||
# # Create a new JSON structure and call transform_grid_data_to_table
|
||||
# filtered_json_data = {
|
||||
# "gridData": filtered_grid_data,
|
||||
# "relativeWRData": relative_wr_data,
|
||||
# "immediateWRData": immediate_wr_data,
|
||||
# "nodeNames": [node.get('name') for node in filtered_grid_data],
|
||||
# "nodeIds": node_ids_SAL
|
||||
# }
|
||||
#
|
||||
# # Call transform_grid_data_to_table with the filtered JSON data
|
||||
# # data_table, _, _, node_names, _ = transform_grid_data_to_table(filtered_json_data)
|
||||
# data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = transform_grid_data_to_table(filtered_json_data)
|
||||
# if not node_names:
|
||||
# node_names = node_ids
|
||||
#
|
||||
# else: # There is not any node id match - Proceed only with the nodes from SAL's reply
|
||||
# print("No matching node IDs found in the saved data. Proceed only with data from SAL")
|
||||
# selected_criteria = ["Number of CPU Cores", "Memory Size"]
|
||||
# field_mapping = create_criteria_mapping(selected_criteria, extracted_data_SAL)
|
||||
# data_table = create_data_table(selected_criteria, extracted_data_SAL, field_mapping)
|
||||
# # Assign relativeWRData and immediateWRData regardless of node ID matches
|
||||
# relative_wr_data = []
|
||||
# immediate_wr_data = []
|
||||
# node_ids = node_ids_SAL
|
||||
# node_names = node_ids
|
||||
# if not node_names_SAL:
|
||||
# node_names = node_ids
|
||||
# else:
|
||||
# print(f"No JSON file found for application ID {app_id}.")
|
||||
#
|
||||
# # Note: relative_wr_data and immediate_wr_data are returned regardless of the node IDs match
|
||||
# return data_table, relative_wr_data, immediate_wr_data, node_names, node_ids
|
||||
|
||||
# append_evaluation_results('SAL_Response_11EdgeDevs.json', ScoresAndRanks)
|
||||
|
||||
|
||||
#Used to create data table from SAL's response in app_side
|
||||
|
||||
# def read_application_data(app_id, sal_reply_body):
|
||||
# app_dir = os.path.join("app_dirs", app_id)
|
||||
# file_path = os.path.join(app_dir, f"{app_id}_data.json")
|
||||
# data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = {}, [], [], [], []
|
||||
#
|
||||
# default_list_criteria_mapping = {
|
||||
# "Operating cost": "price",
|
||||
# "Memory Price": "memoryPrice",
|
||||
# "Number of CPU Cores": "cores",
|
||||
# "Memory Size": "ram",
|
||||
# "Storage Capacity": "disk"
|
||||
# }
|
||||
#
|
||||
# if isinstance(sal_reply_body, str):
|
||||
# try:
|
||||
# sal_reply_body = json.loads(sal_reply_body)
|
||||
# except json.JSONDecodeError as e:
|
||||
# print(f"Error parsing JSON: {e}")
|
||||
# return data_table, relative_wr_data, immediate_wr_data, node_names, node_ids
|
||||
#
|
||||
# if os.path.exists(file_path):
|
||||
# with open(file_path, 'r', encoding='utf-8') as f:
|
||||
# data = json.load(f)
|
||||
# selected_criteria = {criterion['title']: criterion for criterion in data.get('selectedCriteria', [])}
|
||||
#
|
||||
# for criterion in selected_criteria.keys():
|
||||
# data_table[criterion] = []
|
||||
#
|
||||
# matched_node_ids = set(node['id'] for node in data.get('gridData', [])) & set(node['id'] for node in sal_reply_body)
|
||||
# unmatched_node_ids = set(node['id'] for node in sal_reply_body) - matched_node_ids
|
||||
#
|
||||
# # Ordinal value mapping for MATCHED nodes
|
||||
# ordinal_value_mapping = {"High": 3, "Medium": 2, "Low": 1}
|
||||
#
|
||||
# # Process MATCHED nodes from JSON file
|
||||
# for node in data.get('gridData', []):
|
||||
# if node['id'] in matched_node_ids:
|
||||
# node_ids.append(node['id'])
|
||||
# # node_names.append(node.get('name', 'Unknown'))
|
||||
# for criterion, crit_info in selected_criteria.items():
|
||||
# value = next((c['value'] for c in node['criteria'] if c['title'] == criterion), None)
|
||||
# if value is not None:
|
||||
# value = 1 if value is True else (0 if value is False else value)
|
||||
# else: # Apply default if criterion not found
|
||||
# value = 0.00001 if crit_info['type'] == 2 else 0
|
||||
# data_table[criterion].append(value)
|
||||
#
|
||||
# # Process UNMATCHED nodes from sal_reply_body
|
||||
# for node_id in unmatched_node_ids:
|
||||
# node_data = next((node for node in sal_reply_body if node['id'] == node_id), {})
|
||||
# node_ids.append(node_id)
|
||||
# for criterion, crit_info in selected_criteria.items():
|
||||
# mapped_field = default_list_criteria_mapping.get(criterion, '')
|
||||
# value = node_data.get(mapped_field, 0.00001 if crit_info['type'] == 2 else False)
|
||||
# value = 1 if value is True else (0 if value is False else value)
|
||||
# data_table[criterion].append(value)
|
||||
#
|
||||
# # convert True/False to 1/0 in data_table for both boolean and string representations
|
||||
# for criterion, values in data_table.items():
|
||||
# data_table[criterion] = [convert_bool(value) for value in values]
|
||||
# node_names = node_ids
|
||||
# relative_wr_data, immediate_wr_data = data.get('relativeWRData', []), data.get('immediateWRData', [])
|
||||
#
|
||||
# else: # There is not any node id match - Proceed only with the nodes from SAL's reply
|
||||
# print(f"No JSON file found for application ID {app_id}. Proceed only with data from SAL.")
|
||||
# extracted_data_SAL, node_ids_SAL, node_names_SAL = extract_SAL_node_candidate_data(sal_reply_body)
|
||||
# selected_criteria = ["Number of CPU Cores", "Memory Size"]
|
||||
# field_mapping = create_criteria_mapping(selected_criteria, extracted_data_SAL)
|
||||
# data_table = create_data_table(selected_criteria, extracted_data_SAL, field_mapping)
|
||||
# # Assign relativeWRData and immediateWRData regardless of node ID matches
|
||||
# relative_wr_data = []
|
||||
# immediate_wr_data = []
|
||||
# node_ids = node_ids_SAL
|
||||
# node_names = node_ids
|
||||
#
|
||||
# return data_table, relative_wr_data, immediate_wr_data, node_names, node_ids
|
||||
|
||||
|
||||
# Used to transform SAL's response before sending to DataGrid
|
||||
# This version is designed to read the structure of SAL's response obtained from POSTMAN
|
||||
def extract_node_candidate_data(json_file_path):
|
||||
with open(json_file_path, 'r') as file:
|
||||
json_data = json.load(file)
|
||||
|
||||
extracted_data = []
|
||||
node_ids = []
|
||||
node_names = []
|
||||
|
||||
for item in json_data:
|
||||
hardware_info = item.get("nodeCandidate", {}).get("hardware", {})
|
||||
node_data = {
|
||||
"name": item['name'],
|
||||
"id": item['id'],
|
||||
"nodeId": item.get("nodeCandidate", {}).get("nodeId"),
|
||||
"nodeCandidateType": item.get("nodeCandidate", {}).get("nodeCandidateType"),
|
||||
"price": item.get("nodeCandidate", {}).get("price", 0.0),
|
||||
"pricePerInvocation": item.get("nodeCandidate", {}).get("pricePerInvocation", 0.0),
|
||||
"memoryPrice": item.get("nodeCandidate", {}).get("memoryPrice", 0.0),
|
||||
"hardware": {
|
||||
"id": hardware_info.get("id"),
|
||||
"name": hardware_info.get("name"),
|
||||
"providerId": hardware_info.get("providerId"),
|
||||
"cores": hardware_info.get("cores"),
|
||||
"ram": hardware_info.get("ram") * 1024 if hardware_info.get("ram") else None, # Assuming RAM needs conversion from GB to MB
|
||||
"disk": hardware_info.get("disk"),
|
||||
"fpga": hardware_info.get("fpga")
|
||||
}
|
||||
}
|
||||
extracted_data.append(node_data)
|
||||
node_ids.append(item['id'])
|
||||
node_names.append(item.get('name', ''))
|
||||
|
||||
return extracted_data, node_ids, node_names
|
||||
|
||||
|
||||
# Works for dummy_node_data
|
||||
# def create_node_name(node_data):
|
||||
# # dummy_node_data = '''{
|
||||
# # "id": "8a7481d98e702b64018e702cbe070000",
|
||||
# # "nodeCandidateType": "EDGE",
|
||||
# # "jobIdForByon": null,
|
||||
# # "jobIdForEdge": "FCRnewLight0",
|
||||
# # "price": 0.0,
|
||||
# # "cloud": {
|
||||
# # "id": "edge",
|
||||
# # "endpoint": null,
|
||||
# # "cloudType": "EDGE",
|
||||
# # "api": null,
|
||||
# # "credential": null,
|
||||
# # "cloudConfiguration": {
|
||||
# # "nodeGroup": null,
|
||||
# # "properties": {}
|
||||
# # },
|
||||
# # "owner": "EDGE",
|
||||
# # "state": null,
|
||||
# # "diagnostic": null
|
||||
# # },
|
||||
# # "location": {
|
||||
# # "id": "edge-location-KmVf4xDJKL7acBGc",
|
||||
# # "name": null,
|
||||
# # "providerId": null,
|
||||
# # "locationScope": null,
|
||||
# # "isAssignable": null,
|
||||
# # "geoLocation": {
|
||||
# # "city": "Warsaw",
|
||||
# # "country": "Poland",
|
||||
# # "latitude": 52.237049,
|
||||
# # "longitude": 21.017532
|
||||
# # },
|
||||
# # "parent": null,
|
||||
# # "state": null,
|
||||
# # "owner": null
|
||||
# # },
|
||||
# # "image": {
|
||||
# # "id": "edge-image-KmVf4xDJKL7acBGc",
|
||||
# # "name": "edge-image-name-UBUNTU-UNKNOWN",
|
||||
# # "providerId": null,
|
||||
# # "operatingSystem": {
|
||||
# # "operatingSystemFamily": "UBUNTU",
|
||||
# # "operatingSystemArchitecture": "UNKNOWN",
|
||||
# # "operatingSystemVersion": 1804.00
|
||||
# # },
|
||||
# # "location": null,
|
||||
# # "state": null,
|
||||
# # "owner": null
|
||||
# # },
|
||||
# # "hardware": {
|
||||
# # "id": "edge-hardware-KmVf4xDJKL7acBGc",
|
||||
# # "name": null,
|
||||
# # "providerId": null,
|
||||
# # "cores": 1,
|
||||
# # "ram": 1,
|
||||
# # "disk": 1.0,
|
||||
# # "fpga": 0,
|
||||
# # "location": null,
|
||||
# # "state": null,
|
||||
# # "owner": null
|
||||
# # },
|
||||
# # "pricePerInvocation": 0.0,
|
||||
# # "memoryPrice": 0.0,
|
||||
# # "nodeId": null,
|
||||
# # "environment": null
|
||||
# # }'''
|
||||
# # node_data = json.loads(dummy_node_data)
|
||||
# # print("node_data in create node name")
|
||||
# # print(node_data)
|
||||
# node_type = node_data["nodeCandidateType"]
|
||||
# # print(node_type)
|
||||
# if node_data["location"]:
|
||||
# node_location = node_data["location"]["geoLocation"]
|
||||
# # print(json.dumps(node_location))
|
||||
# node_city = node_location["city"]
|
||||
# node_country = node_location["country"]
|
||||
# else:
|
||||
# node_city = ""
|
||||
# node_country = ""
|
||||
# node_os = node_data["image"]["operatingSystem"]["operatingSystemFamily"]
|
||||
# node_name = node_type + " - " + node_city + " , " + node_country + " - " + node_os
|
||||
# # print("node name crated: " + node_name)
|
||||
# return node_name
|
@ -1,8 +1,6 @@
|
||||
# ActiveMQ communication logic
|
||||
import sys
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
sys.path.insert(0,'../exn')
|
||||
import logging
|
||||
from dotenv import load_dotenv
|
||||
@ -18,9 +16,10 @@ from exn.core.handler import Handler
|
||||
from exn.handler.connector_handler import ConnectorHandler
|
||||
from User_Functions import *
|
||||
import uuid
|
||||
from Evaluation import perform_evaluation
|
||||
|
||||
# logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
# logging.getLogger('exn.connector').setLevel(logging.CRITICAL)
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
logging.getLogger('exn.connector').setLevel(logging.CRITICAL)
|
||||
|
||||
class SyncedHandler(Handler):
|
||||
def on_message(self, key, address, body, message: Message, context=None):
|
||||
@ -35,76 +34,57 @@ class SyncedHandler(Handler):
|
||||
|
||||
# Save the correlation_id (We do not have it from the app_side)
|
||||
uuid.uuid4().hex.encode("utf-8") # for Correlation id
|
||||
|
||||
correlation_id_optimizer = message.correlation_id
|
||||
if not correlation_id_optimizer:
|
||||
correlation_id_optimizer = '88334290cad34ad9b21eb468a9f8ff11' # dummy correlation_id
|
||||
|
||||
# logging.info(f"Optimizer_correlation_id {message.correlation_id}")
|
||||
print("Optimizer Correlation Id: ", correlation_id_optimizer)
|
||||
# print("Optimizer Correlation Id: ", correlation_id_optimizer)
|
||||
|
||||
# application_id_optimizer = message.properties.application # can be taken also from message.annotations.application
|
||||
application_id_optimizer = message.subject
|
||||
# application_id_optimizer = 'd535cf554ea66fbebfc415ac837a5828' #dummy application_id_optimizer
|
||||
print("Application Id: ", application_id_optimizer)
|
||||
# print("Application Id: ", application_id_optimizer)
|
||||
|
||||
try:
|
||||
# Read the Message Sent from Optimizer
|
||||
opt_message_data = body
|
||||
print("Whole Message Sent from Optimizer:", opt_message_data)
|
||||
# print("Whole Message Sent from Optimizer:", opt_message_data)
|
||||
|
||||
# Extract 'body' from opt_message_data
|
||||
body_sent_from_optimizer = opt_message_data.get('body', {})
|
||||
|
||||
# 100 Nodes
|
||||
## Example body
|
||||
# body_sent_from_optimizer = [
|
||||
# {
|
||||
# "type": "NodeTypeRequirement",
|
||||
# "nodeTypes": ["IAAS"],
|
||||
# "jobIdForByon": "dummy-app-id",
|
||||
# "jobIdForEDGE": "dummy-app-id"
|
||||
# }
|
||||
# ]
|
||||
|
||||
|
||||
# 58 Nodes
|
||||
# body_sent_from_optimizer = [
|
||||
# {
|
||||
# "type": "NodeTypeRequirement",
|
||||
# "nodeTypes": ["IAAS"],
|
||||
# "jobIdForByon": "dummy-app-id",
|
||||
# "jobIdForEDGE": "dummy-app-id"
|
||||
# },
|
||||
# {
|
||||
# "type": "AttributeRequirement",
|
||||
# "requirementClass": "hardware",
|
||||
# "requirementAttribute": "cores",
|
||||
# "requirementOperator": "EQ",
|
||||
# "value": "2"
|
||||
# },
|
||||
# {
|
||||
# "type": "AttributeRequirement",
|
||||
# "requirementClass": "hardware",
|
||||
# "requirementAttribute": "ram",
|
||||
# "requirementOperator": "EQ",
|
||||
# "value": "4096"
|
||||
# # "nodeTypes": ["EDGES"]
|
||||
# "nodeTypes": ["IAAS", "PAAS", "FAAS", "BYON", "EDGE", "SIMULATION"]
|
||||
# # ,"jobIdForEDGE": "FCRnewLight0"
|
||||
# }
|
||||
# # ,{
|
||||
# # "type": "AttributeRequirement",
|
||||
# # "requirementClass": "hardware",
|
||||
# # "requirementAttribute": "ram",
|
||||
# # "requirementOperator": "EQ",
|
||||
# # "value": "2"
|
||||
# # }
|
||||
# ]
|
||||
|
||||
# logging.info(body_sent_from_optimizer)
|
||||
# print("Extracted body from Optimizer Message:", body_sent_from_optimizer)
|
||||
print("Extracted body from Optimizer Message:", body_sent_from_optimizer)
|
||||
|
||||
## Prepare message to be send to SAL
|
||||
# Convert the body data to a JSON string
|
||||
# body_json_string = json.dumps(body_sent_from_optimizer) # For Sender
|
||||
body_json_string = body_sent_from_optimizer # For Optimizer
|
||||
|
||||
# body_json_string = json.dumps(body_sent_from_optimizer)
|
||||
body_json_string = body_sent_from_optimizer
|
||||
RequestToSal = { # Dictionary
|
||||
"metaData": {"user": "admin"}, # key [String "metaData"] value [dictionary]
|
||||
"body": body_json_string # key [String "body"] value [JSON String]
|
||||
}
|
||||
# logging.info("RequestToSal: %s", RequestToSal)
|
||||
print("RequestToSal:", RequestToSal)
|
||||
|
||||
# print("RequestToSal:", RequestToSal)
|
||||
# print("Is RequestToSal a valid dictionary:", isinstance(RequestToSal, dict))
|
||||
# print("Is the 'body' string in RequestToSal a valid JSON string:", is_json(RequestToSal["body"]))
|
||||
|
||||
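# Illustrative shape (assumed values) of the message forwarded to SAL; the body is the
# requirements list received from the Optimizer, passed through unchanged:
body_sent_from_optimizer = [{"type": "NodeTypeRequirement", "nodeTypes": ["IAAS"]}]
RequestToSal = {"metaData": {"user": "admin"}, "body": body_sent_from_optimizer}
# sal_reply = context.publishers['SAL-GET'].send_sync(RequestToSal)
# sal_reply.get('body') then contains the node candidates as a JSON string.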
@ -112,97 +92,73 @@ class SyncedHandler(Handler):
|
||||
sal_reply = context.publishers['SAL-GET'].send_sync(RequestToSal)
|
||||
|
||||
## Process SAL's Reply
|
||||
# sal_reply_body = sal_reply.get('body')
|
||||
sal_body = sal_reply.get('body') # Get the 'body' as a JSON string
|
||||
|
||||
# try:
|
||||
# # Parse the JSON string to a Python object
|
||||
# nodes_data = json.loads(sal_body)
|
||||
# total_nodes = len(nodes_data) # Get the total number of nodes
|
||||
#
|
||||
# # Check if more than 51 nodes exist
|
||||
# if total_nodes > 58:
|
||||
# print("More than 58 nodes exist. Only the first 51 nodes will be processed.")
|
||||
# # Filter to only include the first 51 nodes
|
||||
# sal_reply_body = nodes_data[:60]
|
||||
# else:
|
||||
# print(f"Total {total_nodes} nodes found. Processing all nodes.")
|
||||
# sal_reply_body = sal_reply.get('body')
|
||||
#
|
||||
# except json.JSONDecodeError as e:
|
||||
# print(f"Error parsing JSON: {e}")
|
||||
|
||||
|
||||
# filename = 'SAL_Response_10EdgeDevs.json'
|
||||
# with open(filename, 'r') as file:
|
||||
# sal_reply_body = json.load(file)
|
||||
# print("SAL's Reply from JSON File:", sal_reply_body)
|
||||
|
||||
try:
|
||||
# Parse the JSON string to a Python object
|
||||
nodes_data = json.loads(sal_body)
|
||||
# Check if there is any error in SAL's reply body
|
||||
if 'key' in nodes_data and any(keyword in nodes_data['key'].lower() for keyword in ['error', 'exception']):
|
||||
print("Error found in message body:", nodes_data['message'])
|
||||
sal_reply_body = []
|
||||
else: # No error found in SAL's reply body
|
||||
total_nodes = len(nodes_data) # Get the total number of nodes
|
||||
print("Total Nodes in SAL's reply:", total_nodes)
|
||||
|
||||
# Check if more than 58 nodes exist
|
||||
if total_nodes > 400:
|
||||
print("More than 58 nodes exist. Only the first 51 nodes will be processed.")
|
||||
# Filter to only include the first 51 nodes and convert back to JSON string
|
||||
if total_nodes > 400: # Check if more than 400 nodes received
|
||||
print("More than 400 nodes returned from SAL.")
|
||||
# Filter to only include the first 400 nodes and convert back to JSON string
|
||||
sal_reply_body = json.dumps(nodes_data[:400])
|
||||
else:
|
||||
print(f"Total {total_nodes} nodes found. Processing all nodes.")
|
||||
elif total_nodes > 0 and total_nodes <= 400:
|
||||
print(f"Total {total_nodes} nodes returned from SAL. Processing all nodes.")
|
||||
# Keep sal_reply_body as is since it's already a JSON string
|
||||
sal_reply_body = sal_body
|
||||
else:
|
||||
print(f"Total {total_nodes} nodes returned from SAL.")
|
||||
sal_reply_body = []
|
||||
|
||||
except json.JSONDecodeError as e:
|
||||
print(f"Error parsing JSON: {e}")
|
||||
sal_reply_body = "[]" # Default to an empty JSON array as a string in case of error
|
||||
print(f"Error parsing JSON reply from SAL: {e}")
|
||||
sal_reply_body = []  # Default to an empty list in case of error
|
||||
|
||||
if sal_reply_body: # Check whether SAL's reply body is empty
|
||||
# logging.info(f"Whole reply Received from SAL: {sal_reply}")
|
||||
# logging.info(f"Reply Received from SAL: {sal_reply}")
|
||||
# print("SAL reply Body:", sal_reply_body)
|
||||
|
||||
# Check the number of nodes before Evaluation
|
||||
if total_nodes > 1:
|
||||
# Search for application_id, Read JSON and create data to pass to Evaluation
|
||||
if check_json_file_exists(application_id_optimizer): # Application JSON exist in DB
|
||||
print(f"JSON file for application ID {application_id_optimizer} exists.")
|
||||
node_ids = extract_SAL_node_candidate_data(sal_reply)[2]  # take the third element (index 2) of the tuple the function returns
|
||||
# node_ids = ['8a7482868df473cc018df47d8ea60003', '8a7482868df473cc018df47d8fc70005', '8a7482868df473cc018df47d90e70007', '8a7482868df473cc018df47d92090009', '8a7482868df473cc018df47d9326000b', '8a7482868df473cc018df47d9445000d', '8a7482868df473cc018df47d957f000f', '8a7482868df473cc018df47d96a50011', '8a7482868df473cc018df47d97c70013', '8a7482868df473cc018df47d98e30015']
|
||||
# print("node_ids_SAL:", node_ids_SAL)
|
||||
# Check if there are differences in available nodes between saved data in JSON file and SAL's reply
|
||||
data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = read_application_data(application_id_optimizer, sal_reply_body)
|
||||
# print("sal_reply_body:", sal_reply_body)
|
||||
# print("data_table filtered from JSON and SAL:", data_table)
|
||||
# print("node_ids filtered from JSON and SAL:", node_ids)
|
||||
# print("relative_wr_data:", relative_wr_data)
|
||||
# print("immediate_wr_data:", immediate_wr_data)
|
||||
# print("node_names filtered from JSON and SAL:", node_names)
|
||||
|
||||
# Check if there is any difference in available nodes between saved data in DB and SAL's reply
|
||||
data_table, relative_wr_data, immediate_wr_data, node_names = read_application_data(application_id_optimizer, node_ids)
|
||||
if not node_names:
|
||||
node_names = node_ids
|
||||
print("data_table filtered from DB:", data_table)
|
||||
print("node_ids filtered from DB:", node_ids)
|
||||
print("node_names filtered from DB:", node_names)
|
||||
|
||||
# We need to use the most up-to-date data for the nodes sent by SAL.
# The function could be modified to retrieve only the WR info, but that breaks down if other criteria are used.
# Perhaps the new data should be used only for the criteria whose values come from SAL, and the saved values for the rest of the criteria.
# If SAL sends a new node for which no data has been saved, do not consider it when criteria other than the default ones are in use.
|
||||
else: # Application JSON does not exist in DB
|
||||
else: # Application does not exist in directory
|
||||
print(f"JSON file for application ID {application_id_optimizer} does not exist.")
|
||||
# Read data from SAL's response by calling the function extract_node_candidate_data()
|
||||
# extracted_data, number_of_nodes, node_ids, node_names = extract_node_candidate_data('SAL_Response_11EdgeDevs.json')
|
||||
extracted_data, number_of_nodes, node_ids, node_names = extract_SAL_node_candidate_data(sal_reply_body)
|
||||
# print("extracted_data:", extracted_data)
|
||||
print("node_ids:", node_ids)
|
||||
# extracted_data_SAL, node_ids, node_names = extract_node_candidate_data('SAL_Response_11EdgeDevs.json')
|
||||
extracted_data_SAL, node_ids, node_names = extract_SAL_node_candidate_data(sal_reply_body)
|
||||
# print("extracted_data_SAL:", extracted_data_SAL)
|
||||
# print("node_ids:", node_ids)
|
||||
|
||||
# Use the create_criteria_mapping() to get the criteria mappings
|
||||
# selected_criteria = ["Operating cost", "Memory Price", "Number of CPU Cores", "Memory Size", "Storage Capacity"]
|
||||
selected_criteria = ["Number of CPU Cores", "Memory Size"]
|
||||
field_mapping = create_criteria_mapping(selected_criteria, extracted_data)
|
||||
field_mapping = create_criteria_mapping()
|
||||
# Create data_table:
|
||||
data_table = create_data_table(selected_criteria, extracted_data, field_mapping)
|
||||
data_table = create_data_table(selected_criteria, extracted_data_SAL, field_mapping)
|
||||
relative_wr_data = []
|
||||
immediate_wr_data = []
|
||||
print("created_data_table:", data_table)
|
||||
# print("created_data_table:", data_table)
|
||||
|
||||
# Check the number of nodes before Evaluation
|
||||
print("There are " + str(len(node_ids)) + " elements in node_ids")
|
||||
print("There are " + str(len(node_ids)) + " nodes for Evaluation")
|
||||
|
||||
## Run evaluation
|
||||
evaluation_results = perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_names, node_ids)
|
||||
@ -211,24 +167,26 @@ class SyncedHandler(Handler):
|
||||
## Extract and save the results
|
||||
# ScoresAndRanks = evaluation_results['results']
|
||||
ScoresAndRanks = evaluation_results.get('results', [])
|
||||
print("Scores and Ranks:", ScoresAndRanks)
|
||||
# print("Scores and Ranks:", ScoresAndRanks)
|
||||
|
||||
# Append the Score and Rank of each node to SAL's Response
|
||||
SAL_and_Scores_Body = append_evaluation_results(sal_reply_body, ScoresAndRanks)
|
||||
# SAL_and_Scores_Body = append_evaluation_results('SAL_Response_11EdgeDevs.json', ScoresAndRanks)
|
||||
# print("SAL_and_Scores_Body:", SAL_and_Scores_Body)
|
||||
else:
|
||||
print("There is only one node!")
|
||||
# Append the Score and Rank of each node to SAL's Response
|
||||
SAL_and_Scores_Body = append_evaluation_results(sal_reply_body, [])
|
||||
|
||||
## Prepare message to be sent to OPTIMIZER
|
||||
# CFSBResponse = read_dummy_response_data_toOpt('CFSB_Body_Response.json') # Data and Scores for 5 Nodes
|
||||
|
||||
CFSBResponse = {
|
||||
"metaData": {"user": "admin"},
|
||||
"body": SAL_and_Scores_Body
|
||||
}
|
||||
print("CFSBResponse:", CFSBResponse)
|
||||
|
||||
# print("CFSBResponse:", CFSBResponse)
|
||||
# Writing the formatted JSON to a json file
|
||||
formatted_json = json.dumps(CFSBResponse, indent=4)
|
||||
# Writing the formatted JSON to a file named test.json
|
||||
with open('CFSBResponse.json', 'w') as file:
|
||||
file.write(formatted_json)
|
||||
print("Formatted JSON has been saved to CFSBResponse.json")
|
||||
@ -248,6 +206,14 @@ class SyncedHandler(Handler):
|
||||
logging.error(f"Failed to parse message body from Optimizer as JSON: {e}")
|
||||
|
||||
|
||||
|
||||
def requestSAL(self, RequestToSal):
|
||||
sal_reply = Context.publishers['SAL-GET'].send_sync(RequestToSal)
|
||||
# Process SAL's Reply
|
||||
sal_body = sal_reply.get('body') # Get the 'body' as a JSON string
|
||||
# print("sal_body requestSAL function:", sal_body)
|
||||
return sal_body
|
||||
|
||||
class Bootstrap(ConnectorHandler):
|
||||
context = None
|
||||
def ready(self, context: Context):
|
||||
@ -279,6 +245,13 @@ def start_exn_connector_in_background():
|
||||
thread.daemon = True # Daemon threads will shut down immediately when the program exits
|
||||
thread.start()
|
||||
|
||||
|
||||
def call_publisher(body):
|
||||
handler = SyncedHandler()
|
||||
request = handler.requestSAL(body)
|
||||
return request
|
||||
|
||||
|
||||
# Used to read dummy response and send to Optimizer using JSON
|
||||
# I have already sent to Optimizer using this function
|
||||
def read_dummy_response_data_toOpt(file_path):
|
||||
@ -291,6 +264,7 @@ def read_dummy_response_data_toOpt(file_path):
|
||||
}
|
||||
return encapsulated_data
|
||||
|
||||
|
||||
def is_json(myjson):
|
||||
try:
|
||||
json_object = json.loads(myjson)
|
||||
|
@ -1,11 +1,7 @@
from app_factory import create_app
from dotenv import load_dotenv
from activemq import start_exn_connector_in_background
from activemqOLD import start_exn_connector_in_background1
from app_factory import create_app # Import your Flask app factory

load_dotenv()

app = create_app()
# Start the EXN connector in the background
start_exn_connector_in_background()
@ -1,26 +0,0 @@
|
||||
version: '3.0'
|
||||
services:
|
||||
web:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "8001:8001"
|
||||
env_file:
|
||||
- .env.prod
|
||||
depends_on:
|
||||
- db
|
||||
db:
|
||||
image: postgres:16
|
||||
ports:
|
||||
- "5432:5432"
|
||||
environment:
|
||||
- POSTGRES_USER=dbuser
|
||||
- POSTGRES_PASSWORD=pass123
|
||||
- POSTGRES_DB=fog_broker
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data/
|
||||
- ./db/db_script.sql:/docker-entrypoint-initdb.d/db_script.sql
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
cfsb-backend/edge_data_ipat.json (new file, 80 lines)
@ -0,0 +1,80 @@
|
||||
{
|
||||
"_id": "b4ce322c-698a-43b9-a889-bf0da2a4dcb9",
|
||||
"os": "LINUX",
|
||||
"name": "Test VM #0001",
|
||||
"owner": "admin",
|
||||
"ipAddress": "10.10.0.6",
|
||||
"location": {
|
||||
"name": "laptop",
|
||||
"latitude": 12.345,
|
||||
"longitude": 56.789
|
||||
},
|
||||
"username": "ubuntu",
|
||||
"password": [
|
||||
"u",
|
||||
"b",
|
||||
"u",
|
||||
"n",
|
||||
"t",
|
||||
"u"
|
||||
],
|
||||
"publicKey": [],
|
||||
"deviceInfo": {
|
||||
"CPU_SOCKETS": "1",
|
||||
"CPU_CORES": "10",
|
||||
"CPU_PROCESSORS": "20",
|
||||
"RAM_TOTAL_KB": "16218480",
|
||||
"RAM_AVAILABLE_KB": "13366788",
|
||||
"RAM_FREE_KB": "10943372",
|
||||
"RAM_USED_KB": "5275108",
|
||||
"RAM_UTILIZATION": "32.5253",
|
||||
"DISK_TOTAL_KB": "1055762868",
|
||||
"DISK_FREE_KB": "976527612",
|
||||
"DISK_USED_KB": "79235256",
|
||||
"DISK_UTILIZATION": "7.50502",
|
||||
"OS_ARCHITECTURE": "x86_64",
|
||||
"OS_KERNEL": "Linux",
|
||||
"OS_KERNEL_RELEASE": "5.15.133.1-microsoft-standard-WSL2"
|
||||
},
|
||||
"requestId": "eb6441fc-613a-482e-ba94-b16db57ecd36",
|
||||
"creationDate": "2024-01-15T13:23:40.602Z",
|
||||
"lastUpdateDate": "2024-01-15T14:32:43.485Z",
|
||||
"status": "HEALTHY",
|
||||
"nodeReference": "40ed1989-49ba-4496-a5c5-3d8ca1a18972",
|
||||
"messages": [],
|
||||
"statusUpdate": {
|
||||
"ipAddress": "10.10.0.6",
|
||||
"clientId": "VM-LINUX-TEST-VM-0001-Test VM #0001-DEFAULT-10.10.0.6-_",
|
||||
"state": "REGISTERED",
|
||||
"stateLastUpdate": "2024-01-15T13:23:47.463Z",
|
||||
"reference": "40ed1989-49ba-4496-a5c5-3d8ca1a18972",
|
||||
"errors": []
|
||||
},
|
||||
"metrics": {
|
||||
"ipAddress": "10.10.0.6",
|
||||
"clientId": "VM-LINUX-TEST-VM-0001-Test VM",
|
||||
"timestamp": "2024-01-15T14:32:33.467Z",
|
||||
"metrics": {
|
||||
"count-total-events-failures": 0,
|
||||
"count-total-events-text": 0,
|
||||
"tx": 0,
|
||||
"count-total-events-other": 0,
|
||||
"count-event-forwards-success": 0,
|
||||
"count-event-forwards-failure": 0,
|
||||
"rx": 0,
|
||||
"count-total-events": 0,
|
||||
"cpu": 0.6,
|
||||
"uptime": 10742,
|
||||
"count-event-local-publish-failure": 0,
|
||||
"count-total-events-object": 0,
|
||||
"disk": 2.48262,
|
||||
"count-event-local-publish-success": 0,
|
||||
"updatetime": 1705318391,
|
||||
"currdatetime": 1705329133,
|
||||
"ram": 23.7719
|
||||
},
|
||||
"latestEvents": []
|
||||
},
|
||||
"retries": 0,
|
||||
"_class": "eu.nebulous.resource.discovery.monitor.model.Device"
|
||||
}
|
@ -7,9 +7,11 @@ from data_types import get_attr_data_type
|
||||
import db.db_functions as db_functions
|
||||
import os
|
||||
import time
|
||||
import get_data as file
|
||||
import activemq
|
||||
# from activemq import connector_handler
|
||||
import traceback
|
||||
import logging
|
||||
# logging.disable(logging.CRITICAL)
|
||||
|
||||
main_routes = Blueprint('main', __name__)
|
||||
|
||||
@ -19,6 +21,74 @@ NoData_Variables = ['attr-security', 'attr-performance-capacity', 'attr-performa
|
||||
Cont_Variables = ['attr-performance', 'attr-financial', 'attr-performance-capacity-memory',
|
||||
'attr-performance-capacity-memory-speed']
|
||||
|
||||
dummy_node_data = {
|
||||
"id": "8a7481d98e702b64018e702cbe070000",
|
||||
"nodeCandidateType": "EDGE",
|
||||
"jobIdForByon": "",
|
||||
"jobIdForEdge": "FCRnewLight0",
|
||||
"price": 0.0,
|
||||
"cloud": {
|
||||
"id": "edge",
|
||||
"endpoint": "",
|
||||
"cloudType": "EDGE",
|
||||
"api": "",
|
||||
"credential": "",
|
||||
"cloudConfiguration": {
|
||||
"nodeGroup": "",
|
||||
"properties": {}
|
||||
},
|
||||
"owner": "EDGE",
|
||||
"state": "",
|
||||
"diagnostic": ""
|
||||
},
|
||||
"location": {
|
||||
"id": "edge-location-KmVf4xDJKL7acBGc",
|
||||
"name": "",
|
||||
"providerId": "",
|
||||
"locationScope": "",
|
||||
"isAssignable": "",
|
||||
"geoLocation": {
|
||||
"city": "Warsaw",
|
||||
"country": "Poland",
|
||||
"latitude": 52.237049,
|
||||
"longitude": 21.017532
|
||||
},
|
||||
"parent": "",
|
||||
"state": "",
|
||||
"owner": ""
|
||||
},
|
||||
"image": {
|
||||
"id": "edge-image-KmVf4xDJKL7acBGc",
|
||||
"name": "edge-image-name-UBUNTU-UNKNOWN",
|
||||
"providerId": "",
|
||||
"operatingSystem": {
|
||||
"operatingSystemFamily": "UBUNTU",
|
||||
"operatingSystemArchitecture": "UNKNOWN",
|
||||
"operatingSystemVersion": 1804.00
|
||||
},
|
||||
"location": "",
|
||||
"state": "",
|
||||
"owner": ""
|
||||
},
|
||||
"hardware": {
|
||||
"id": "edge-hardware-KmVf4xDJKL7acBGc",
|
||||
"name": "",
|
||||
"providerId": "",
|
||||
"cores": 1,
|
||||
"ram": 1,
|
||||
"disk": 1.0,
|
||||
"fpga": 0,
|
||||
"location": "",
|
||||
"state": "",
|
||||
"owner": ""
|
||||
},
|
||||
"pricePerInvocation": 0.0,
|
||||
"memoryPrice": 0.0,
|
||||
"nodeId": "",
|
||||
"environment": ""
|
||||
}
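dummy_node_data mimics a single SAL node-candidate entry, so it can double as a fixture. A hedged usage sketch; the extractor's exact signature is inferred from the call further down in this file, and passing a one-element list is an assumption:

    # Hypothetical smoke test: the dummy candidate should parse like a real SAL reply
    extracted, node_ids, node_names = extract_SAL_node_candidate_data_Front([dummy_node_data])
    print(node_ids)    # e.g. ['8a7481d98e702b64018e702cbe070000']
    print(node_names)  # display names produced by create_node_name()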
|
||||
|
||||
|
||||
# Used in HomePage.vue to save app_id and user_id
|
||||
# @main_routes.route('/save_ids', methods=['POST'])
|
||||
# def save_ids():
|
||||
@ -34,7 +104,6 @@ Cont_Variables = ['attr-performance', 'attr-financial', 'attr-performance-capaci
|
||||
# Used in CriteriaSelection.vue
|
||||
@main_routes.route('/get_hierarchical_category_list')
|
||||
def get_hierarchical_category_list():
|
||||
# TODO order by title in every level
|
||||
items_list = file.get_level_1_items() # Assume this function returns the list correctly
|
||||
if items_list is not None:
|
||||
# Return the list as a JSON response
|
||||
@ -48,101 +117,196 @@ def get_hierarchical_category_list():
|
||||
def process_selected_criteria():
|
||||
try:
|
||||
data = request.json
|
||||
# Selected Criteria by the User from the List
|
||||
selected_criteria = data.get('selectedItems', [])
|
||||
# Extract app_id, user_id
|
||||
application_id = data.get('app_id') # Take it from local storage from frontend
|
||||
# application_id = 'd535cf554ea66fbebfc415ac837a5828' #dummy application_id_optimizer
|
||||
user_id = data.get('user_id') # Take it from local storage from frontend
|
||||
print("user_id:", user_id)
|
||||
print("application_id:", application_id)
|
||||
|
||||
## Prepare message to be sent to SAL
|
||||
message_for_SAL = [ # User side so ask SAL for every available node
|
||||
{
|
||||
# application_id = data.get('app_id')
|
||||
# user_id = data.get('user_id')
|
||||
# print("user_id:", user_id)
|
||||
# print("application_id:", application_id)
|
||||
|
||||
message_for_SAL = [{
|
||||
"type": "NodeTypeRequirement",
|
||||
"nodeTypes": ["IAAS", "PAAS", "FAAS", "BYON", "EDGE", "SIMULATION"]
|
||||
# "jobIdForEDGE": "FCRnewLight0"
|
||||
}
|
||||
"nodeTypes": ["IAAS", "PAAS", "FAAS", "BYON", "EDGE", "SIMULATION"]}
|
||||
# ,{
|
||||
# "type": "AttributeRequirement",
|
||||
# "requirementClass": "hardware",
|
||||
# "requirementAttribute": "cores",
|
||||
# "requirementOperator": "GEQ",
|
||||
# "value": "64"
|
||||
# },
|
||||
# {
|
||||
# "type": "AttributeRequirement",
|
||||
# "requirementClass": "hardware",
|
||||
# "requirementAttribute": "ram",
|
||||
# "requirementOperator": "GEQ",
|
||||
# "value": "33000"
|
||||
# }
|
||||
]
|
||||
# Convert the body data to a JSON string
|
||||
body_json_string = json.dumps(message_for_SAL)
|
||||
body_json_string_for_SAL = json.dumps(message_for_SAL)
|
||||
|
||||
RequestToSal = { # Dictionary
|
||||
"metaData": {"user": "admin"}, # key [String "metaData"] value [dictionary]
|
||||
"body": body_json_string # key [String "body"] value [JSON String]
|
||||
RequestToSal = {
|
||||
"metaData": {"user": "admin"},
|
||||
"body": body_json_string_for_SAL
|
||||
}
|
||||
print("RequestToSal:", RequestToSal)
|
||||
# print("RequestToSal:", RequestToSal)
|
||||
|
||||
# print("Is RequestToSal a valid dictionary:", isinstance(RequestToSal, dict))
|
||||
# print("Is the 'body' string in RequestToSal a valid JSON string:", is_json(RequestToSal["body"]))
|
||||
sal_reply = activemq.call_publisher(RequestToSal)
|
||||
nodes_data = json.loads(sal_reply) if isinstance(sal_reply, str) else sal_reply
|
||||
# print("nodes_data", nodes_data)
|
||||
|
||||
## Request the node candidates from SAL
|
||||
# sal_reply = activemq.context.publishers['SAL-GET'].send_sync(RequestToSal)
|
||||
extracted_data, node_ids, node_names = extract_SAL_node_candidate_data_Front(nodes_data)
|
||||
# print("extracted_data:", extracted_data)
|
||||
field_mapping = create_criteria_mapping()
|
||||
# print("field_mapping", field_mapping)
|
||||
|
||||
## Process SAL's Reply
|
||||
# extracted_data, number_of_nodes, node_ids, node_names = extract_SAL_node_candidate_data(sal_reply)
|
||||
# extracted_data, number_of_nodes, node_names = extract_node_candidate_data('dummy_data_node_candidates.json')
|
||||
extracted_data, number_of_nodes, node_ids, node_names = extract_node_candidate_data('SAL_Response_11EdgeDevs.json')
|
||||
print("extracted_data:", extracted_data)
|
||||
default_list_criteria_mapping = {
|
||||
# "Cost": "price",
|
||||
"Operating cost": "price",
|
||||
"Memory Price": "memoryPrice",
|
||||
"Number of CPU Cores": "cores",
|
||||
"Memory Size": "ram",
|
||||
"Storage Capacity": "disk"
|
||||
}
|
||||
|
||||
# Use the create_criteria_mapping() to get the criteria mappings
|
||||
field_mapping = create_criteria_mapping(selected_criteria, extracted_data)
|
||||
grid_data = {name: [] for name in node_names}
|
||||
grid_data = {}
|
||||
|
||||
# Prepare the data to be sent to DataGrid.vue
|
||||
# Blank by default for the Selected Criteria not found in mapping
|
||||
for node_data in extracted_data:
|
||||
node_name = node_data.get('name') # Using name to match
|
||||
node_id = node_data.get('id') # Extract the node ID
|
||||
grid_data[node_name] = {"id": node_id, "criteria": []}
|
||||
node_id = node_data.get('id')
|
||||
# print("Before create_node_name")
|
||||
node_name = create_node_name(node_data) if node_data else "Unknown"
|
||||
# print("After create_node_name")
|
||||
|
||||
if node_name in grid_data: # Check if node_name exists in grid_data keys
|
||||
for item in selected_criteria:
|
||||
criterion_data = {}
|
||||
criterion_data["data_type"] = get_attr_data_type(item)
|
||||
item_data_dict = file.get_subject_data(file.SMI_prefix + item)
|
||||
criterion_data["title"] = item_data_dict["title"]
|
||||
field_name = field_mapping.get(criterion_data["title"], item)
|
||||
if node_id and node_id not in grid_data:
|
||||
grid_data[node_id] = {"name": node_name, "criteria": []}
|
||||
|
||||
# Check if the field_name is a direct key or nested inside 'hardware'
|
||||
if field_name in node_data:
|
||||
value = node_data[field_name]
|
||||
elif 'hardware' in node_data and field_name in node_data['hardware']:
|
||||
value = node_data['hardware'][field_name]
|
||||
hardware_info = node_data.get('hardware', {}) # contains the values for criteria coming from SAL
|
||||
|
||||
for criterion_key in selected_criteria:
|
||||
# print("criterion_key:", criterion_key)
|
||||
criterion_info = file.get_subject_data(file.SMI_prefix + criterion_key) # It contains the titles of the criteria
|
||||
# print("criterion_info:", criterion_info)
|
||||
|
||||
# Resolve title and then map title to field name
|
||||
criterion_data_type = get_attr_data_type(criterion_key) # criterion_data_type: {'type': 1, 'values': ['Low', 'Medium', 'High']}
|
||||
# print("criterion_data_type:", criterion_data_type)
|
||||
criterion_title = criterion_info["title"]
|
||||
|
||||
# Fetch the values of the selected default criteria
|
||||
if criterion_title in default_list_criteria_mapping:
|
||||
SAL_criterion_name = field_mapping.get(criterion_title) # Map the criterion title with the criterion name in SAL's reply
|
||||
value = hardware_info.get(SAL_criterion_name, "N/A") # Get the criterion values
|
||||
else:
|
||||
# Generate random or default values for unmapped criteria or missing data
|
||||
item_data_type_value = criterion_data["data_type"].get('type')
|
||||
if item_data_type_value == 1:
|
||||
# Handle other criteria (this part may need adjustment based on your actual data structure)
|
||||
# value = "N/A" # Placeholder for the logic to determine non-default criteria values
|
||||
# Generate random or default values for the remaining criteria
|
||||
type_value = criterion_data_type['type']
|
||||
# print("type_value:", type_value)
|
||||
|
||||
if type_value == 1:
|
||||
value = random.choice(["High", "Medium", "Low"])
|
||||
elif item_data_type_value == 5:
|
||||
elif type_value == 5:
|
||||
value = random.choice(["True", "False"])
|
||||
else:
|
||||
value = round(random.uniform(1, 100), 2)
|
||||
|
||||
criterion_data["value"] = value if value != 0 else 0.00001
|
||||
# grid_data[node_id].append(criterion_data)
|
||||
# grid_data[node_name].append(criterion_data) # Use node_name as key
|
||||
grid_data[node_name]["criteria"].append(criterion_data)
|
||||
criterion_data = {
|
||||
"title": criterion_title,
|
||||
"value": value,
|
||||
"data_type": criterion_data_type # criterion_data_type: {'type': 1, 'values': ['Low', 'Medium', 'High']}
|
||||
}
|
||||
grid_data[node_id]["criteria"].append(criterion_data)
|
||||
|
||||
# Conversion to list format remains unchanged
|
||||
# grid_data_with_names = [{'name': name, 'criteria': data} for name, data in grid_data.items()]
|
||||
grid_data_with_names = [{'name': name, 'id': data["id"], 'criteria': data["criteria"]} for name, data in grid_data.items()]
|
||||
print("grid_data_with_names:", grid_data_with_names)
|
||||
grid_data_with_names = [{
|
||||
'name': data["name"],
|
||||
'id': node_id,
|
||||
'criteria': data["criteria"]
|
||||
} for node_id, data in grid_data.items()]
|
||||
# print("grid_data_with_names:", grid_data_with_names)
|
||||
|
||||
# Send the comprehensive grid_data_with_names to the frontend
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'gridData': grid_data_with_names,
|
||||
'NodeNames': node_names
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error processing selected items: {e}")
|
||||
traceback.print_exc()
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
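To make the contract with DataGrid.vue concrete, an illustrative example of the JSON this route returns; the values are invented, only the shape follows the code above:

    example_response = {
        "success": True,
        "gridData": [
            {
                "name": "Test VM #0001",
                "id": "8a7481d98e702b64018e702cbe070000",
                "criteria": [
                    {"title": "Number of CPU Cores", "value": 1,
                     "data_type": {"type": 2}},
                    {"title": "Reputation", "value": "Medium",
                     "data_type": {"type": 1, "values": ["Low", "Medium", "High"]}},
                ],
            }
        ],
        "NodeNames": ["Test VM #0001"],
    }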
|
||||
|
||||
|
||||
# Used in WR.vue
|
||||
# Works by reading a JSON file with dummy data
|
||||
# def process_selected_criteria():
|
||||
# try:
|
||||
# data = request.json
|
||||
# # Selected Criteria by the User from the List
|
||||
# selected_criteria = data.get('selectedItems', [])
|
||||
# # Extract app_id, user_id
|
||||
# application_id = data.get('app_id') # Take it from local storage from frontend
|
||||
# # application_id = 'd535cf554ea66fbebfc415ac837a5828' #dummy application_id_optimizer
|
||||
# user_id = data.get('user_id') # Take it from local storage from frontend
|
||||
# print("user_id:", user_id)
|
||||
# print("application_id:", application_id)
|
||||
#
|
||||
# ## Process SAL's Reply
|
||||
# # extracted_data, number_of_nodes, node_names = extract_node_candidate_data('dummy_data_node_candidates.json')
|
||||
# extracted_data, node_ids, node_names = extract_node_candidate_data('SAL_Response_11EdgeDevs.json')
|
||||
# print("extracted_data:", extracted_data)
|
||||
#
|
||||
# # Use the create_criteria_mapping() to get the criteria mappings
|
||||
# field_mapping = create_criteria_mapping(selected_criteria, extracted_data)
|
||||
# grid_data = {name: [] for name in node_names}
|
||||
#
|
||||
# # Prepare the data to be sent to DataGrid.vue
|
||||
# for node_data in extracted_data:
|
||||
# node_name = node_data.get('name') # Using name to match
|
||||
# node_id = node_data.get('id') # Extract the node ID
|
||||
# grid_data[node_name] = {"id": node_id, "criteria": []}
|
||||
#
|
||||
# if node_name in grid_data: # Check if node_name exists in grid_data keys
|
||||
# for item in selected_criteria:
|
||||
# criterion_data = {}
|
||||
# criterion_data["data_type"] = get_attr_data_type(item)
|
||||
# item_data_dict = file.get_subject_data(file.SMI_prefix + item)
|
||||
# criterion_data["title"] = item_data_dict["title"]
|
||||
# field_name = field_mapping.get(criterion_data["title"], item)
|
||||
#
|
||||
# # Check if the field_name is a direct key or nested inside 'hardware'
|
||||
# if field_name in node_data:
|
||||
# value = node_data[field_name]
|
||||
# elif 'hardware' in node_data and field_name in node_data['hardware']:
|
||||
# value = node_data['hardware'][field_name]
|
||||
# else:
|
||||
# # Generate random or default values for unmapped criteria or missing data
|
||||
# item_data_type_value = criterion_data["data_type"].get('type')
|
||||
# if item_data_type_value == 1:
|
||||
# value = random.choice(["High", "Medium", "Low"])
|
||||
# elif item_data_type_value == 5:
|
||||
# value = random.choice(["True", "False"])
|
||||
# else:
|
||||
# value = round(random.uniform(1, 100), 2)
|
||||
#
|
||||
# criterion_data["value"] = value if value != 0 else 0.00001
|
||||
# # grid_data[node_id].append(criterion_data)
|
||||
# grid_data[node_name]["criteria"].append(criterion_data)
|
||||
#
|
||||
# # Conversion to list format remains unchanged
|
||||
# # grid_data_with_names = [{'name': name, 'criteria': data} for name, data in grid_data.items()]
|
||||
# grid_data_with_names = [{'name': name, 'id': data["id"], 'criteria': data["criteria"]} for name, data in grid_data.items()]
|
||||
# print("grid_data_with_names:", grid_data_with_names)
|
||||
#
|
||||
# # Send the comprehensive grid_data_with_names to the frontend
|
||||
# return jsonify({
|
||||
# 'success': True,
|
||||
# 'gridData': grid_data_with_names,
|
||||
# 'NodeNames': node_names
|
||||
# })
|
||||
# except Exception as e:
|
||||
# print(f"Error processing selected items: {e}")
|
||||
# traceback.print_exc()
|
||||
# return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
|
||||
|
||||
@main_routes.route('/process-evaluation-data', methods=['POST'])
|
||||
def process_evaluation_data():
|
||||
try:
|
||||
@ -150,14 +314,15 @@ def process_evaluation_data():
|
||||
if data is None:
|
||||
raise ValueError("Received data is not in JSON format or 'Content-Type' header is not set to 'application/json'")
|
||||
|
||||
print("JSON data:", data)
|
||||
# print("JSON in process_evaluation_data:", data)
|
||||
# Transform grid data to table and get node names directly from the function
|
||||
data_table, relative_wr_data, immediate_wr_data, node_names, node_ids = transform_grid_data_to_table(data)
|
||||
|
||||
# print("data_table:", data_table)
|
||||
# print("data_table FRONT:", data_table)
|
||||
# print("relative_wr_data:", relative_wr_data)
|
||||
# print("immediate_wr_data:", immediate_wr_data)
|
||||
# print("node_names:", node_names)
|
||||
# print("# node_names:", len(node_names))
|
||||
# print("# node_ids:", len(node_ids))
|
||||
|
||||
# Run Optimization - Perform evaluation
|
||||
results = perform_evaluation(data_table, relative_wr_data, immediate_wr_data, node_names, node_ids)
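For orientation, a hedged sketch of the structures transform_grid_data_to_table is expected to hand to perform_evaluation; the shapes are inferred from the parameter names and the commented debug prints above, not verified output:

    data_table = {"Operating cost": [10.0, 12.5],   # one list of values per criterion,
                  "Memory Size": [16.0, 8.0]}       # aligned with node_ids / node_names
    relative_wr_data = []    # pairwise weight restrictions collected in WR.vue, if any
    immediate_wr_data = []   # absolute weight restrictions collected in WR.vue, if any
    node_ids = ["8a7481d98e702b64018e702cbe070000", "b4ce322c-698a-43b9-a889-bf0da2a4dcb9"]
    node_names = ["Test VM #0001", "Test VM #0002"]
    results = perform_evaluation(data_table, relative_wr_data, immediate_wr_data,
                                 node_names, node_ids)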
|
||||
|
140
cfsb-backend/test.py
Normal file
@ -0,0 +1,140 @@
|
||||
# ActiveMQ communication logic via EXN library
|
||||
import sys
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
sys.path.insert(0,'../exn')
|
||||
import logging
|
||||
from dotenv import load_dotenv
|
||||
load_dotenv()
|
||||
from proton import Message
|
||||
from exn import core
|
||||
from exn.connector import EXN
|
||||
from exn.core.consumer import Consumer
|
||||
from exn.core.synced_publisher import SyncedPublisher
|
||||
from exn.core.publisher import Publisher
|
||||
from exn.core.context import Context
|
||||
from exn.core.handler import Handler
|
||||
from exn.handler.connector_handler import ConnectorHandler
|
||||
from User_Functions import *
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
logging.getLogger('exn.connector').setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
class SyncedHandler(Handler):
|
||||
def on_message(self, key, address, body, message: Message, context=None):
|
||||
logging.info(f"[SyncedHandler] Received {key} => {address}: {body}")
|
||||
logging.info("on_message in SyncedHandler is executed")
|
||||
logging.info(f"[body] {body}")
|
||||
|
||||
# Triggered by OPTIMIZER, Get app id, correlation id and filters
|
||||
# if address == "topic://eu.nebulouscloud.cfsb.get_node_candidates":
|
||||
if key == "OPT-triggering":
|
||||
logging.info("Entered in OPT-triggering Key")
|
||||
|
||||
# Save the correlation_id (We do not have it from the app_side)
|
||||
# Optimizer_correlation_id = '88334290cad34ad9b21eb468a9f8ff11' # dummy correlation_id
|
||||
Optimizer_correlation_id = message.correlation_id
|
||||
logging.info(f"Optimizer_correlation_id {message.correlation_id}")
|
||||
application_id = message.subject # can be taken also from message.annotations.application
|
||||
|
||||
try:
|
||||
opt_message_data = body
|
||||
print("Message from Optimizer:", opt_message_data)
|
||||
|
||||
# Extract 'body' from opt_message_data
|
||||
# opt_body_data = opt_message_data.get('body', {})
|
||||
opt_body_data = [
|
||||
{
|
||||
"type": "NodeTypeRequirement",
|
||||
"nodeTypes": ["EDGE"],
|
||||
"jobIdForEDGE": "FCRnewLight0"
|
||||
}
|
||||
]
|
||||
logging.info(opt_body_data)
|
||||
print("Extracted body from Optim Message:", opt_body_data)
|
||||
|
||||
## Prepare message to be sent to SAL
|
||||
RequestToSal = {
|
||||
"metaData": {"user": "admin"},
|
||||
"body": opt_body_data
|
||||
}
|
||||
print("RequestToSal:", RequestToSal)
|
||||
|
||||
# Convert the Python structure to a JSON string
|
||||
# RequestToSal = json.dumps(RequestToSal)
|
||||
|
||||
# Request the node candidates from SAL
|
||||
sal_reply = context.publishers['SAL-GET'].send_sync(RequestToSal, application_id,
|
||||
properties={'correlation_id': Optimizer_correlation_id}, raw=False)
|
||||
# sal_reply = context.publishers['SAL-GET'].send_sync(RequestToSal, application_id)
|
||||
if sal_reply:
|
||||
logging.info(f"Received reply from SAL: {sal_reply}")
|
||||
print("SAL reply:", sal_reply)
|
||||
else:
|
||||
print("No reply from SAL!")
|
||||
|
||||
## Prepare message to be sent to OPTIMIZER
|
||||
CFSBResponse = read_dummy_response_data_toOpt('CFSB_Body_Response.json')
|
||||
|
||||
# SAL_and_Scores_Body would combine SAL's node data with the computed scores (placeholder)
|
||||
# Encapsulate the data within the "body" structure
|
||||
# CFSBResponse = {
|
||||
# "metaData": {"user": "admin"},
|
||||
# "body": SAL_and_Scores_Body
|
||||
# }
|
||||
# print("CFSBResponse:", CFSBResponse)
|
||||
|
||||
# Send message to Optimizer
|
||||
context.get_publisher('SendToOPT').send(CFSBResponse, application_id)
|
||||
# context.publishers['SendToOPT'].send(CFSBResponse, application_id, properties={
|
||||
# 'correlation_id': Optimizer_correlation_id}, raw=True)
|
||||
|
||||
except json.JSONDecodeError as e:
|
||||
logging.error(f"Failed to parse message body from Optimizer as JSON: {e}")
|
||||
|
||||
class Bootstrap(ConnectorHandler):
|
||||
context = None
|
||||
def ready(self, context: Context):
|
||||
self.context = context
|
||||
|
||||
|
||||
def start_exn_connector_in_background():
|
||||
def run_connector():
|
||||
# eu.nebulouscloud.exn.sal.nodecandidate.*
|
||||
addressSAL_GET = 'eu.nebulouscloud.exn.sal.nodecandidate.get'
|
||||
|
||||
addressSAL_GET_REPLY = 'eu.nebulouscloud.exn.sal.nodecandidate.get.reply'
|
||||
addressOPTtriggering = 'eu.nebulouscloud.cfsb.get_node_candidates'
|
||||
addressSendToOPT = 'eu.nebulouscloud.cfsb.get_node_candidates.reply'
|
||||
|
||||
connector = EXN('ui', url="localhost", port=5672, username="admin", password="admin",
|
||||
handler=Bootstrap(),
|
||||
publishers=[
|
||||
SyncedPublisher('SAL-GET', addressSAL_GET, True, True),
|
||||
core.publisher.Publisher('SendToOPT', addressSendToOPT, True, True)
|
||||
],
|
||||
consumers=[
|
||||
# Consumer('SAL-GET-REPLY', addressSAL_GET, handler=SyncedHandler(), topic=True, fqdn=True),
|
||||
Consumer('OPT-triggering', addressOPTtriggering, handler=SyncedHandler(), topic=True, fqdn=True)
|
||||
])
|
||||
connector.start()
|
||||
|
||||
# Start the EXN connector in a separate thread
|
||||
thread = threading.Thread(target=run_connector)
|
||||
thread.daemon = True # Daemon threads will shut down immediately when the program exits
|
||||
thread.start()
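Because the connector runs on a daemon thread, a standalone run of this module would exit immediately unless the main thread is kept alive; a small hedged sketch of how that could look:

    if __name__ == "__main__":
        start_exn_connector_in_background()
        while True:
            time.sleep(1)  # keep the process (and the daemon connector thread) alive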
|
||||
|
||||
|
||||
# Used to read dummy JSON and send to Optimizer
|
||||
def read_dummy_response_data_toOpt(file_path):
|
||||
with open(file_path, 'r') as file:
|
||||
data = json.load(file)
|
||||
# Encapsulating the data within the "body" structure
|
||||
encapsulated_data = {
|
||||
"metaData": {"user": "admin"},
|
||||
"body": data
|
||||
}
|
||||
return encapsulated_data
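A brief usage note: the helper only wraps whatever JSON the file holds in the metaData/body envelope, e.g. (file name taken from the handler above):

    response_to_opt = read_dummy_response_data_toOpt('CFSB_Body_Response.json')
    # response_to_opt == {"metaData": {"user": "admin"}, "body": <parsed JSON from the file>}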
|
||||
|
1256
cfsb-backend/updated_SALs_JSON.json
Normal file
File diff suppressed because it is too large
2
cfsb-frontend/.env
Normal file
@ -0,0 +1,2 @@
|
||||
VUE_APP_BACKEND_URL=http://127.0.0.1:5000
|
||||
VITE_BACKEND_URL=http://127.0.0.1:5000
|
2
cfsb-frontend/.env.development
Normal file
@ -0,0 +1,2 @@
|
||||
VUE_APP_BACKEND_URL=http://127.0.0.1:8001
|
||||
VITE_BACKEND_URL=http://127.0.0.1:8001
|
2
cfsb-frontend/.env.production
Normal file
@ -0,0 +1,2 @@
|
||||
VUE_APP_BACKEND_URL=http://127.0.0.1:8001
|
||||
VITE_BACKEND_URL=http://127.0.0.1:8001
|
22
cfsb-frontend/Dockerfile
Normal file
@ -0,0 +1,22 @@
|
||||
# Step 1: Build Stage
|
||||
FROM node:16 as build-stage
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json package-lock.json ./
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN npm run build
|
||||
|
||||
# Step 2: Nginx Stage
|
||||
FROM docker.io/nginx:alpine
|
||||
|
||||
COPY --from=build-stage /app/dist /usr/share/nginx/html
|
||||
COPY .env.production /usr/share/nginx/html/.env
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
1297
cfsb-frontend/package-lock.json
generated
File diff suppressed because it is too large
@ -10,6 +10,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"chart.js": "^4.4.1",
|
||||
"package.json": "^2.0.1",
|
||||
"vue-router": "^4.0.13"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
Binary file not shown.
Before: 4.2 KiB | After: 15 KiB
@ -28,17 +28,55 @@
|
||||
<!-- Main content where routed components will be displayed -->
|
||||
<!-- <router-view></router-view>
|
||||
<button v-if="showCriteriaSelectionButton" @click="goToCriteriaSelection">Go to Criteria Selection</button> -->
|
||||
|
||||
<div class="modal fade" id="userLoginModal" aria-hidden="false">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h1 class="modal-title">User login</h1>
|
||||
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
|
||||
<form @submit.prevent="submitUserLoginForm">
|
||||
<div class="mb-3">
|
||||
<label for="app_id" class="form-label">Insert Application ID</label>
|
||||
<input type="text" class="form-control" id="app_id" v-model="app_id" placeholder="Application ID" required>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="username" class="form-label">Your username</label>
|
||||
<input type="text" class="form-control" id="username" v-model="username" placeholder="Username" required>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<label for="password" class="form-label">Your password</label>
|
||||
<input type="password" class="form-control" id="password" v-model="password" placeholder="Password" required>
|
||||
</div>
|
||||
|
||||
<button type="submit" class="btn btn-success">Login</button>
|
||||
</form>
|
||||
|
||||
<div v-if="!login" class="alert alert-danger">Error</div>
|
||||
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal" ref="modalCloseBtn">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<footer class="footer text-center p-2">
|
||||
<span class="text-white">© NebulOus</span>
|
||||
<span class="text-white">© NebulOus - Cloud Fog Service Broker</span>
|
||||
</footer>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style>
|
||||
:root {
|
||||
--main-color: #7030A0;
|
||||
--main-color: #1b253b;
|
||||
--secondary-color: #e0cffc;
|
||||
--color-indigo-700: #3d0a91;
|
||||
--color-indigo-700: #172135;
|
||||
--light-gray-color: #f8f9fa;
|
||||
--medium-gray-color: #6c757d;
|
||||
}
|
||||
@ -89,22 +127,107 @@
|
||||
|
||||
.footer {
|
||||
background-color: var(--main-color);
|
||||
margin-top: 15px;
|
||||
}
|
||||
</style>
|
||||
|
||||
<script>
|
||||
export const backendURL = import.meta.env.VITE_BACKEND_URL;
|
||||
const apiURL = backendURL;
|
||||
export default {
|
||||
name: 'App',
|
||||
data() {
|
||||
return {
|
||||
username: "",
|
||||
password: "",
|
||||
uuid: "",
|
||||
app_id: "",
|
||||
login: true
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
goToCriteriaSelection() {
|
||||
this.$router.push('/criteria-selection');
|
||||
},
|
||||
checkUserLogin() {
|
||||
let uuid = localStorage.getItem('fog_broker_user_uuid');
|
||||
if (uuid) {
|
||||
console.log("user is set");
|
||||
} else {
|
||||
console.log("user not set");
|
||||
let myModal = new bootstrap.Modal(document.getElementById('userLoginModal'));
|
||||
myModal.show();
|
||||
}
|
||||
},
|
||||
async submitUserLoginForm() {
|
||||
console.log('username:', this.username);
|
||||
let user_data = {
|
||||
'username': this.username,
|
||||
'password': this.password,
|
||||
'app_id': this.app_id,
|
||||
}
|
||||
let result = await this.fetchUser(user_data)
|
||||
this.username = "";
|
||||
this.password = "";
|
||||
},
|
||||
async fetchUser(user_data) {
|
||||
try {
|
||||
const response = await fetch(apiURL+'/login', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(user_data)
|
||||
});
|
||||
const data = await response.json();
|
||||
console.log(data);
|
||||
if (data.length===1) {
|
||||
this.uuid = data[0][2];
|
||||
console.log(data[0][2]);
|
||||
localStorage.setItem('fog_broker_user_uuid', data[0][2]);
|
||||
localStorage.setItem('fog_broker_app_id', user_data.app_id);
|
||||
let elem = this.$refs.modalCloseBtn
|
||||
elem.click()
|
||||
this.login = true;
|
||||
} else {
|
||||
this.login = false;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error fetching user:', error);
|
||||
}
|
||||
},
|
||||
getURLparams() {
|
||||
let app_in_url = false
|
||||
let user_in_url = false
|
||||
let app_id_from_js = new URL(location.href).searchParams.get('app_id');
|
||||
let user_id_from_js = new URL(location.href).searchParams.get('user_id');
|
||||
|
||||
if (app_id_from_js) {
|
||||
console.log('app_id from URL:', app_id_from_js);
|
||||
this.app_id = app_id_from_js;
|
||||
app_in_url = true;
|
||||
localStorage.setItem('fog_broker_app_id', this.app_id);
|
||||
}
|
||||
if (user_id_from_js) {
|
||||
console.log('user_id from URL:', user_id_from_js);
|
||||
this.uuid = user_id_from_js;
|
||||
user_in_url = true
|
||||
localStorage.setItem('fog_broker_user_uuid', this.uuid);
|
||||
}
|
||||
if (app_in_url && user_in_url){
|
||||
return true
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
showCriteriaSelectionButton() {
|
||||
return this.$route.path === '/'; /* other conditions */
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.getURLparams();
|
||||
this.checkUserLogin();
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
/* color palette from <https://github.com/vuejs/theme> */
|
||||
:root {
|
||||
--vt-c-white: #ffffff;
|
||||
--vt-c-white-soft: #f8f8f8;
|
||||
--vt-c-white-mute: #f2f2f2;
|
||||
--vt-c-white: #1B253BFF;
|
||||
--vt-c-white-soft: #1b253b;
|
||||
--vt-c-white-mute: #1b253b;
|
||||
|
||||
--vt-c-black: #181818;
|
||||
--vt-c-black-soft: #222222;
|
||||
|
@ -1,28 +1,33 @@
|
||||
<template>
|
||||
<div class="row" style="padding-bottom: 2rem">
|
||||
</div>
|
||||
<div class="row" style="padding-bottom: 2rem"></div>
|
||||
<div class="container">
|
||||
<div class="row justify-content-center">
|
||||
<div class="col col-12 col-lg-8">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<!-- Use a flex container for the header -->
|
||||
<div class="card-header d-flex justify-content-between align-items-center">
|
||||
<h2>Selection of Criteria</h2>
|
||||
<!-- Clickable icon and text for expanding/collapsing -->
|
||||
<span class="expand-collapse-link" @click="toggleExpandAll">
|
||||
<i v-bind:class="expandIconClass"></i>{{ expandButtonText }}</span>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<HierarchicalCategoryList
|
||||
:items="hierarchicalCategoryList"
|
||||
@selected-items="updateSelectedItems"
|
||||
></HierarchicalCategoryList>
|
||||
<p class="description">
|
||||
Please select at least two criteria to proceed.
|
||||
</p>
|
||||
<!-- HierarchicalCategoryList is included here with the necessary bindings -->
|
||||
<HierarchicalCategoryList ref="hierarchicalList" :items="hierarchicalCategoryList" @selected-items="updateSelectedItems"/>
|
||||
</div>
|
||||
</div>
|
||||
<!-- <div>Selected Items Length: {{ selectedItems.length }}</div>
|
||||
<button v-if="selectedItems.length > 0" @click="navigateToDataGrid">Go to DataGrid</button> -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
|
||||
<script>
|
||||
export const backendURL = import.meta.env.VITE_BACKEND_URL;
|
||||
const apiURL = backendURL;
|
||||
import HierarchicalCategoryList from "@/components/HierarchicalCategoryList.vue";
|
||||
|
||||
export default {
|
||||
@ -33,32 +38,62 @@ export default {
|
||||
return {
|
||||
hierarchicalCategoryList: [],
|
||||
selectedItems: [],
|
||||
allCategoriesExpanded: false
|
||||
};
|
||||
},
|
||||
mounted() {
|
||||
console.log('CriteriaSelection.vue mounted');
|
||||
this.fetchHierarchicalCategoryList();
|
||||
},
|
||||
computed: {
|
||||
expandButtonText() {
|
||||
return this.allCategoriesExpanded ? 'Collapse All' : 'Expand All';
|
||||
},
|
||||
expandIconClass() {
|
||||
return this.allCategoriesExpanded ? 'bi-arrow-bar-up' : 'bi-arrow-bar-down';
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
async fetchHierarchicalCategoryList() {
|
||||
try {
|
||||
const response = await fetch('http://127.0.0.1:5000/get_hierarchical_category_list');
|
||||
const response = await fetch(apiURL+'/get_hierarchical_category_list');
|
||||
const data = await response.json();
|
||||
this.hierarchicalCategoryList = data;
|
||||
} catch (error) {
|
||||
console.error('Error fetching hierarchical category list:', error);
|
||||
}
|
||||
},
|
||||
navigateToDataGrid() {
|
||||
console.log('Navigating to DataGrid');
|
||||
this.$router.push({ name: 'DataGrid' });
|
||||
toggleExpandAll() {
|
||||
this.allCategoriesExpanded = !this.allCategoriesExpanded;
|
||||
if (this.$refs.hierarchicalList) {
|
||||
this.$refs.hierarchicalList.setChildrenVisibility(this.hierarchicalCategoryList, this.allCategoriesExpanded);
|
||||
}
|
||||
},
|
||||
updateSelectedItems(newSelectedItems) {
|
||||
//console.log('Updating selected items in CriteriaSelection.vue:', newSelectedItems);
|
||||
this.selectedItems = newSelectedItems;
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
};
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.card-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.expand-collapse-link {
|
||||
cursor: pointer;
|
||||
color: var(--color-indigo-700); user-select: none;
|
||||
display: flex;
|
||||
align-items: center; /* Aligns the icon and text vertically */
|
||||
}
|
||||
|
||||
.expand-collapse-link i {
|
||||
margin-right: 0.5rem; /* Add some space between the icon and text */
|
||||
}
|
||||
|
||||
</style>
|
@ -1,25 +1,68 @@
|
||||
<template>
|
||||
<div>
|
||||
<div class="p-4">
|
||||
<h2>Edge / Fog Nodes Data</h2>
|
||||
<h2>Nodes Data</h2>
|
||||
</div>
|
||||
<!-- <table v-if="gridData.length" class="grid-cell-class">-->
|
||||
<!-- <thead>-->
|
||||
<!-- <tr>-->
|
||||
<!-- <th>Node</th>-->
|
||||
<!-- <!– Assuming all entries have the same criteria, using the first one to generate headers –>-->
|
||||
<!-- <th v-for="(criterion, index) in gridData[0].criteria" :key="index">-->
|
||||
<!-- {{ criterion.title }}-->
|
||||
<!-- </th>-->
|
||||
<!-- </tr>-->
|
||||
<!-- </thead>-->
|
||||
<!-- <tbody>-->
|
||||
<!-- <tr v-for="(entry, entryIndex) in gridData" :key="entry.name">-->
|
||||
<!-- <td>{{ entry.name }}</td>-->
|
||||
<!-- <td v-for="(dataValue, dataIndex) in entry.data_values" :key="`${entry.name}-${dataIndex}`">-->
|
||||
<!-- <!– Numeric data type –>-->
|
||||
<!-- <template v-if="dataValue.data_type.type === 2">-->
|
||||
<!-- <input type="number" v-model="dataValue.value" @blur="validateNumeric(entry.data_values, dataIndex)" step="0.5"/>-->
|
||||
<!-- </template>-->
|
||||
|
||||
<table v-if="Object.keys(gridData).length" class="grid-cell-class">
|
||||
<!-- <!– Ordinal data type –>-->
|
||||
<!-- <template v-else-if="dataValue.data_type.type === 1">-->
|
||||
<!-- <select v-model="dataValue.value">-->
|
||||
<!-- <option v-for="option in dataValue.data_type.values" :value="option" :key="option">{{ option }}</option>-->
|
||||
<!-- </select>-->
|
||||
<!-- </template>-->
|
||||
|
||||
<!-- <!– Boolean data type –>-->
|
||||
<!-- <template v-else-if="dataValue.data_type.type === 5">-->
|
||||
<!-- <select v-model="dataValue.value">-->
|
||||
<!-- <option v-for="option in ['True', 'False']" :value="option" :key="option">{{ option }}</option>-->
|
||||
<!-- </select>-->
|
||||
<!-- </template>-->
|
||||
|
||||
<!-- <!– Fallback or other data types –>-->
|
||||
<!-- <template v-else>-->
|
||||
<!-- <input type="text" v-model="dataValue.value" />-->
|
||||
<!-- </template>-->
|
||||
<!-- </td>-->
|
||||
<!-- </tr>-->
|
||||
<!-- </tbody>-->
|
||||
<!-- </table>-->
|
||||
<table v-if="gridData.length" class="grid-cell-class">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Edge / Fog Nodes</th>
|
||||
<th v-for="(values, column) in gridData" :key="column">{{ values.title }}</th>
|
||||
<th>Node</th>
|
||||
<th v-for="(criterion, index) in gridData[0].criteria" :key="index">{{ criterion.title }}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="index in rowCount" :key="index">
|
||||
<!-- (index - 1) because the v-for index starts at 1 while gridData and fogNodesTitles are 0-indexed -->
|
||||
<td>{{ fogNodesTitles[index-1] }}</td>
|
||||
<td v-for="(values, column) in gridData" :key="`${column}-${index}`">
|
||||
<select v-if="Ordinal_Variables.includes(column)" v-model="values.data_values[index - 1]">
|
||||
<option v-for="option in dropdownOptions" :value="option" :key="option">{{ option }}</option>
|
||||
<tr v-for="(entry, entryIndex) in gridData" :key="entry.name">
|
||||
<td>{{ entry.name }}</td>
|
||||
<td v-for="(criterion, criterionIndex) in entry.criteria" :key="`${entry.name}-${criterionIndex}`">
|
||||
<input v-if="criterion.data_type.type === 2" type="number" v-model="criterion.value" />
|
||||
<select v-else-if="criterion.data_type.type === 1" v-model="criterion.value">
|
||||
<option v-for="option in criterion.data_type.values" :value="option" :key="option">{{ option }}</option>
|
||||
</select>
|
||||
<input v-else type="text" v-model="values.data_values[index - 1]" />
|
||||
<select v-else-if="criterion.data_type.type === 5" v-model="criterion.value">
|
||||
<option v-for="option in ['True', 'False']" :value="option">{{ option }}</option>
|
||||
</select>
|
||||
<input v-else type="text" v-model="criterion.value" />
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
@ -28,58 +71,62 @@
|
||||
No data to display.
|
||||
</div>
|
||||
<div class="pt-4"></div>
|
||||
<!-- <button @click="SaveDataforEvaluation" class="bg-color-primary">Save and Run Evaluation</button> -->
|
||||
<button @click="goBackToCriteriaSelection" class="bg-color-primary">Back to Criteria Selection</button>
|
||||
<button @click="SaveDataforWR" class="bg-color-primary">Save and Add Weight Restrictions</button>
|
||||
</div>
|
||||
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { useRouter } from 'vue-router';
|
||||
|
||||
export const backendURL = import.meta.env.VITE_BACKEND_URL;
|
||||
const apiURL = backendURL;
|
||||
export default {
|
||||
data() {
|
||||
return {
|
||||
fogNodesTitles: [],
|
||||
gridData: [], // Data for the grid
|
||||
selectedItemsFromBack: [],
|
||||
Ordinal_Variables: ['attr-reputation', 'attr-assurance', 'attr-security'],
|
||||
dropdownOptions: ['High', 'Medium', 'Low'], // Options for the dropdown
|
||||
};
|
||||
},
|
||||
setup() {
|
||||
const router = useRouter();
|
||||
return {
|
||||
router
|
||||
NodeNames: [],
|
||||
gridData: [], // Updated to be an array to match the structure provided by the backend
|
||||
};
|
||||
},
|
||||
mounted() {
|
||||
const selectedItems = this.$route.params.selectedItems || [];
|
||||
if (selectedItems.length > 0) {
|
||||
this.fetchGridData(selectedItems);
|
||||
let selectedItemsWithTypes = this.getSelectedItemsFromStorage();
|
||||
if (!selectedItemsWithTypes.length) {
|
||||
selectedItemsWithTypes = this.$route.params.selectedItems || [];
|
||||
}
|
||||
this.fetchFogNodesTitles();
|
||||
},
|
||||
computed: {
|
||||
rowCount() {
|
||||
// Check if gridData has any keys and use the first key to find the row count
|
||||
const firstKey = Object.keys(this.gridData)[0];
|
||||
return firstKey ? this.gridData[firstKey].data_values.length : 0;
|
||||
if (selectedItemsWithTypes.length > 0) {
|
||||
this.fetchGridData(selectedItemsWithTypes.map(item => item.name));
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
// Receives the Grid Data 1st time
|
||||
getSelectedItemsFromStorage() {
|
||||
const storedItems = localStorage.getItem('selectedCriteria');
|
||||
return storedItems ? JSON.parse(storedItems) : [];
|
||||
},
|
||||
async fetchGridData(selectedItems) {
|
||||
try {
|
||||
const response = await fetch('http://127.0.0.1:5000/process_selected_items', {
|
||||
// Retrieve app_id and user_id from local storage directly within this method
|
||||
const app_id = localStorage.getItem('fog_broker_app_id');
|
||||
const user_id = localStorage.getItem('fog_broker_user_uuid');
|
||||
const response = await fetch(apiURL+'/process_selected_criteria', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify({selectedItems}),
|
||||
// body: JSON.stringify({selectedItems}),
|
||||
body: JSON.stringify({
|
||||
selectedItems,
|
||||
app_id, // Include app_id from local storage
|
||||
user_id // Include user_id from local storage
|
||||
})
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const criteria_data = await response.json();
|
||||
this.gridData = criteria_data.gridData; // Assigning the gridData from the response
|
||||
console.log('DataGrid.vue received the criteria data from the Backend:', this.gridData); // Log the received grid data
|
||||
const { gridData, NodeNames } = await response.json();
|
||||
// Initialize data_values for each entry in gridData
|
||||
this.gridData = gridData.map(entry => ({
|
||||
...entry,
|
||||
data_values: entry.criteria.map(criterion => ({
|
||||
value: criterion.value,
|
||||
data_type: criterion.data_type
|
||||
}))
|
||||
}));
|
||||
this.NodeNames = NodeNames || [];
|
||||
} else {
|
||||
throw new Error('Failed to fetch grid data');
|
||||
}
|
||||
@ -87,81 +134,136 @@ export default {
|
||||
console.error('Error fetching grid data:', error);
|
||||
}
|
||||
},
|
||||
fetchFogNodesTitles() { // Receives the names of fog nodes (grid's 1st column)
|
||||
fetch('http://127.0.0.1:5000/get-fog-nodes-titles')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
// 'data' is an array like ['Fog Node 1', 'Fog Node 2', ...]
|
||||
this.fogNodesTitles = data;
|
||||
})
|
||||
.catch(error => console.error('Error fetching fog nodes titles:', error));
|
||||
validateNumeric(entry, criterionIndex) {
|
||||
// Directly modify and validate numeric value within the entry's criteria
|
||||
const numericValue = parseFloat(entry.criteria[criterionIndex].value);
|
||||
if (isNaN(numericValue) || numericValue <= 0) {
|
||||
alert('Please enter a number greater than zero.');
|
||||
entry.criteria[criterionIndex].value = ''; // Reset invalid value
|
||||
return false; // Halt further processing
|
||||
} else {
|
||||
entry.criteria[criterionIndex].value = numericValue; // Update with valid numeric value
|
||||
}
|
||||
},
|
||||
validateGridData() {
|
||||
for (const key in this.gridData) {
|
||||
if (this.gridData.hasOwnProperty(key)) {
|
||||
const dataValues = this.gridData[key].data_values;
|
||||
for (const value of dataValues) {
|
||||
if (value === 0 || value === null || value === '') {
|
||||
return false; // Invalid data found
|
||||
for (const entry of this.gridData) {
|
||||
for (const criterion of entry.criteria) {
|
||||
// Convert value to string to handle trimming and empty checks
|
||||
const valueAsString = String(criterion.value).trim();
|
||||
|
||||
switch (criterion.data_type.type) {
|
||||
case 2: // Numeric
|
||||
const numericValue = parseFloat(valueAsString);
|
||||
if (isNaN(numericValue) || numericValue <= 0) {
|
||||
alert('Please enter a valid number for all numeric fields.');
|
||||
return false; // Prevent further processing
|
||||
}
|
||||
break;
|
||||
|
||||
case 1: // Ordinal
|
||||
if (!criterion.data_type.values.includes(criterion.value)) {
|
||||
alert(`Please select a valid option for ${criterion.title}.`);
|
||||
return false; // Prevent further processing
|
||||
}
|
||||
break;
|
||||
|
||||
case 5: // Boolean
|
||||
if (!["True", "False"].includes(valueAsString)) {
|
||||
alert(`Please select a valid boolean value for ${criterion.title}.`);
|
||||
return false; // Prevent further processing
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
// Check for empty values for any other data types
|
||||
if (valueAsString === '') {
|
||||
alert(`Please ensure all fields are filled for ${criterion.title}.`);
|
||||
return false; // Prevent further processing
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true; // All data is valid
|
||||
return true; // All validations passed
|
||||
},
|
||||
goBackToCriteriaSelection() {
|
||||
this.$router.push({ name: 'CriteriaSelection' });
|
||||
},
|
||||
async SaveDataforWR() {
|
||||
if (!this.validateGridData()) {
|
||||
alert('Invalid input: Zero or null values are not accepted.');
|
||||
return; // Stop submission if validation fails
|
||||
return;
|
||||
}
|
||||
else{
|
||||
// Log the current state of gridData
|
||||
console.log("Before saving, gridData is:", JSON.parse(JSON.stringify(this.gridData)));
|
||||
|
||||
try {
|
||||
const DataforWR = JSON.stringify({
|
||||
gridData: this.gridData
|
||||
const formattedGridData = this.gridData.map(node => ({
|
||||
name: node.name,
|
||||
id: node.id,
|
||||
criteria: node.criteria.map(criterion => ({
|
||||
title: criterion.title,
|
||||
value: criterion.value,
|
||||
data_type: criterion.data_type.type
|
||||
}))
|
||||
}));
|
||||
|
||||
const DataforWR = JSON.stringify(formattedGridData);
|
||||
localStorage.setItem('gridData', DataforWR); // Save gridData to localStorage
|
||||
// console.log("Save DataforWR DataGrid.VUE to localstorage:", JSON.stringify(JSON.parse(DataforWR), null, 2));
|
||||
|
||||
// Save the NodeNames to localStorage
|
||||
const NodeNames = JSON.stringify(this.NodeNames);
|
||||
localStorage.setItem('NodeNames', NodeNames);
|
||||
|
||||
// Navigate to WR component with prepared data and NodeNames
|
||||
this.$router.push({
|
||||
name: 'WR',
|
||||
params: {
|
||||
data: DataforWR,
|
||||
NodeNames: NodeNames // Include NodeNames in the route parameters
|
||||
}
|
||||
});
|
||||
// Navigate to WR component with data
|
||||
this.router.push({ name: 'WR', params: { data: DataforWR } });
|
||||
} catch (error) {
|
||||
console.error('Error:', error);
|
||||
}
|
||||
}
|
||||
},
|
||||
async SaveDataforEvaluation() {
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
|
||||
<style>
|
||||
/* Basic table styling */
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
/* Header styling */
|
||||
th {
|
||||
background-color: #813F8F; /* Primary color */
|
||||
color: #FFFFFF; /* White text */
|
||||
padding: 10px;
|
||||
text-align: center;
|
||||
background-color: #232d45; /* Primary color */
|
||||
color: #FFFFFF; /* White text */
|
||||
padding: 10px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
/* Row styling */
|
||||
td {
|
||||
background-color: #E7E7E7; /* Light grey */
|
||||
color: #374591; /* Secondary color */
|
||||
padding: 8px;
|
||||
background-color: #E7E7E7; /* Light grey */
|
||||
color: #172135; /* Secondary color */
|
||||
padding: 8px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* Alternate row colors for better readability */
|
||||
tr:nth-child(even) {
|
||||
background-color: #E4DCD5; /* Light tan */
|
||||
background-color: #155e75; /* Light tan */
|
||||
}
|
||||
|
||||
/* Hover effect on rows */
|
||||
tr:hover {
|
||||
background-color: #6FBFFF; /* Light blue */
|
||||
background-color: #6FBFFF; /* Light blue */
|
||||
}
|
||||
|
||||
/* Additional styles for editable input fields in the table */
|
||||
@ -200,10 +302,11 @@ button:hover {
|
||||
select {
|
||||
width: 100%;
|
||||
padding: 8px;
|
||||
border: 1px solid #ccc;
|
||||
border: 1px solid #1b253b;
|
||||
}
|
||||
|
||||
.grid-cell-class {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
</style>
|
@ -3,35 +3,37 @@
|
||||
<form v-if="!isChild" @submit.prevent="submitSelection">
|
||||
<ul class="list-group">
|
||||
<li v-for="item in items" :key="item.name" class="list-group-item criteria-card">
|
||||
<span v-if="item.children.length > 0" @click="toggleCategory(item)" class="float-end" v-bind:title="'Expand ' + item.title"><i class="bi bi-arrow-bar-down" v-bind:aria-label="'Expand ' + item.title"></i></span>
|
||||
<span v-if="item.children.length > 0" @click="toggleCategory(item)" class="float-end" v-bind:title="'Expand ' + item.title">
|
||||
<i class="bi bi-arrow-bar-down" v-bind:aria-label="'Expand ' + item.title"></i>
|
||||
</span>
|
||||
<label>
|
||||
<!--
|
||||
<input v-model="item.checked" type="checkbox" @change="handleCheckboxChange(item)" />
|
||||
<span @click="toggleCategory(item)" v-bind:title="item.description">{{ item.title }}</span> -->
|
||||
<!-- <input v-model="item.checked" type="checkbox" @change="handleCheckboxChange(item)" /> -->
|
||||
<input type="checkbox" :checked="item.checked" @change="() => handleCheckboxChange(item)" />
|
||||
<span @click="toggleCategory(item)">{{ item.title }}</span>
|
||||
<span @click="toggleCategory(item)" v-bind:title="item.description"> {{ item.title }} ({{ getTypeName(item.type) }}) </span>
|
||||
</label>
|
||||
<ul v-show="item.showChildren" class="list-group">
|
||||
<!-- Recursive call without Submit button -->
|
||||
<HierarchicalCategoryList :isChild="true" :items="item.children" @selected-items="updateSelectedItems" />
|
||||
<!-- <HierarchicalCategoryList :isChild="true" :items="item.children" @selected-items="updateSelectedItems" /> -->
|
||||
<HierarchicalCategoryList :isChild="true" :items="item.children" />
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
<button @click="goBackToHome" class="bg-color-primary">Back</button>
|
||||
<!-- Submit button outside the recursive structure -->
|
||||
<button type="submit" class="bg-color-primary">Submit</button>
|
||||
<button type="submit" class="bg-color-primary">Next</button>
|
||||
</form>
|
||||
<div v-else>
|
||||
<li v-for="item in items" :key="item.name" class="list-group-item criteria-card">
|
||||
<span v-if="item.children.length > 0" @click="toggleCategory(item)" class="float-end" v-bind:title="'Expand ' + item.title"><i class="bi bi-arrow-bar-down" v-bind:aria-label="'Expand ' + item.title"></i></span>
|
||||
<label>
|
||||
<!-- <input v-model="item.checked" type="checkbox" @change="handleCheckboxChange(item)" />
|
||||
<span @click="toggleCategory(item)" v-bind:title="item.description">{{ item.title }}</span> -->
|
||||
<!-- <input v-model="item.checked" type="checkbox" @change="handleCheckboxChange(item)" />-->
|
||||
<input type="checkbox" :checked="item.checked" @change="() => handleCheckboxChange(item)" />
|
||||
<span @click="toggleCategory(item)">{{ item.title }}</span>
|
||||
<span @click="toggleCategory(item)" v-bind:title="item.description"> {{ item.title }} ({{ getTypeName(item.type) }}) </span>
|
||||
</label>
|
||||
<ul v-show="item.showChildren">
|
||||
<!-- Recursive call without Submit button -->
|
||||
<HierarchicalCategoryList :isChild="true" :items="item.children" @selected-items="updateSelectedItems" />
|
||||
<!-- Recursive call without Submit button
|
||||
<HierarchicalCategoryList :isChild="true" :items="item.children" @selected-items="updateSelectedItems" /> -->
|
||||
<HierarchicalCategoryList :isChild="true" :items="item.children" />
|
||||
</ul>
|
||||
</li>
|
||||
</div>
|
||||
@ -46,11 +48,12 @@ export default {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
updateItemType: Function,
|
||||
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
localSelectedItems: [],
|
||||
selectedItemsFromBack: [],
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
@ -95,50 +98,91 @@ export default {
|
||||
}
|
||||
return null;
|
||||
},
|
||||
setChildrenVisibility(items, visible) {
|
||||
items.forEach(item => {
|
||||
item.showChildren = visible;
|
||||
if (item.children && item.children.length > 0) {
|
||||
this.setChildrenVisibility(item.children, visible);
|
||||
}
|
||||
});
|
||||
},
|
||||
updateSelectedItems() {
|
||||
// Update the selected items list
|
||||
this.localSelectedItems = this.collectSelectedItems(this.items);
|
||||
// Emit the updated list
|
||||
this.$emit('selected-items', this.localSelectedItems);
|
||||
// Update the selected items list with additional type information
|
||||
const selectedItemsWithType = this.items
|
||||
.filter(item => item.checked)
|
||||
.map(item => ({ name: item.name, type: item.type }));
|
||||
},
|
||||
getTypeName(type) {
|
||||
switch (type) {
|
||||
case 2: return 'Numeric';
|
||||
case 1: return 'Ordinal';
|
||||
case 5: return 'Boolean';
|
||||
case 7: return 'Ordinal';
|
||||
default: return 'Numeric';
|
||||
}
|
||||
},
|
||||
collectSelectedItems(items) {
|
||||
let selectedItems = [];
|
||||
for (const item of items) {
|
||||
if (item.checked) {
|
||||
selectedItems.push(item.name);
|
||||
console.log(`Selected item: ${item.name}, Type: ${item.type}, Title: ${item.title}`);
|
||||
selectedItems.push({ name: item.name, type: item.type, title: item.title });
|
||||
//console.log('Selected items in collectSelectedItems:', selectedItems); // Log selected items
|
||||
}
|
||||
if (item.children) {
|
||||
selectedItems = selectedItems.concat(this.collectSelectedItems(item.children));
|
||||
if (item.children && item.children.length > 0) {
|
||||
const childSelectedItems = this.collectSelectedItems(item.children);
|
||||
selectedItems = selectedItems.concat(childSelectedItems);
|
||||
}
|
||||
}
|
||||
return selectedItems;
|
||||
},
|
||||
submitSelection() {
|
||||
async submitSelection() {
|
||||
const selectedItems = this.collectSelectedItems(this.items);
|
||||
//console.log('Selected items in Submit:', selectedItems); // Log selected items
|
||||
|
||||
if (selectedItems.length < 2) {
|
||||
alert('Please select at least two items before submitting.');
|
||||
let nonBooleanCriteriaCount = 0;
|
||||
let selectedItemsWithType = selectedItems.map(item => ({
|
||||
name: item.name,
|
||||
type: item.type,
|
||||
title: item.title
|
||||
}));
|
||||
|
||||
for (const item of selectedItemsWithType) {
|
||||
console.log(`Item: ${item.name}, Type: ${item.type}`); // Add this line for debugging
|
||||
if (item.type !== 5) { // Or item.type !== 'Boolean' depending on the actual format
|
||||
nonBooleanCriteriaCount++;
|
||||
}
|
||||
}
|
||||
|
||||
//console.log('Non-boolean criteria count:', nonBooleanCriteriaCount); // Log non-boolean criteria count
|
||||
console.log('selectedItemsWithType:', selectedItemsWithType);
|
||||
|
||||
if (selectedItemsWithType.length < 2) {
|
||||
//console.log('Blocking submission due to insufficient criteria selection.');
|
||||
alert('Please select at least two criteria to proceed.');
|
||||
return;
|
||||
}
|
||||
|
||||
// Emitting the selected items - useful if there's a parent component listening to this event
|
||||
this.$emit('selected-items', selectedItems);
|
||||
if (nonBooleanCriteriaCount < 2) {
|
||||
//console.log('Blocking submission due to insufficient non-boolean criteria selection.');
|
||||
alert('Please select at least two non-boolean criteria.');
|
||||
return;
|
||||
}
|
||||
|
||||
// Programmatic navigation to the DataGrid page, passing the selected items as route parameters
|
||||
this.$router.push({ name: 'DataGrid', params: { selectedItems: selectedItems } });
|
||||
},
|
||||
async postSelectedItems(selectedItems) {
|
||||
const requestOptions = {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify({selectedItems}),
|
||||
};
|
||||
const response = await fetch('http://127.0.0.1:5000/process_selected_items', requestOptions);
|
||||
const data = await response.json();
|
||||
//console.log('Send Selected items to back', data);
|
||||
this.selectedItemsFromBack = data;
|
||||
},
|
||||
// Save the selected items with types to Local Storage
|
||||
localStorage.setItem('selectedCriteria', JSON.stringify(selectedItemsWithType));
|
||||
|
||||
// Emitting the selected items with types to the DataGrid.vue
|
||||
// this.$emit('selected-items', selectedItemsWithType);
|
||||
|
||||
// Navigate to DataGrid.vue, passing only the item names as route parameters
|
||||
const itemNames = selectedItemsWithType.map(item => item.name);
|
||||
this.$router.push({ name: 'DataGrid', params: { selectedItems: itemNames } });
|
||||
},
|
||||
goBackToHome() {
|
||||
this.$router.push({ name: 'HomePage' });
|
||||
}
|
||||
}
|
||||
};
|
||||
</script>
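For reference, a minimal sketch (not part of this change) of how another component can read back the criteria that submitSelection() stores under the 'selectedCriteria' key; the helper name is illustrative, but the storage key and the type codes follow the getTypeName() mapping above.

    // Illustrative helper: load the persisted criteria and split them by type.
    function loadSelectedCriteria() {
      const raw = localStorage.getItem('selectedCriteria');
      if (!raw) return { boolean: [], nonBoolean: [] };
      const criteria = JSON.parse(raw); // [{ name, type, title }, ...]
      return {
        boolean: criteria.filter(c => c.type === 5),    // type 5 -> Boolean
        nonBoolean: criteria.filter(c => c.type !== 5)  // 1/7 -> Ordinal, 2/default -> Numeric
      };
    }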
|
||||
|
||||
@ -193,6 +237,8 @@ button {
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s ease;
|
||||
margin-bottom: 10px;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
button:hover {
|
||||
@ -217,4 +263,5 @@ ul {
|
||||
background-color: var(--light-gray-color);
|
||||
color: var(--main-color);
|
||||
}
|
||||
|
||||
</style>
|
||||
|
@ -1,18 +1,45 @@
|
||||
<script>
|
||||
export default {
|
||||
name: "HomePage"
|
||||
name: "HomePage",
|
||||
// methods: {
|
||||
// async saveIds(appId, userId) {
|
||||
// fetch(' http://127.0.0.1:5000/save_ids', {
|
||||
// method: 'POST',
|
||||
// headers: {
|
||||
// 'Content-Type': 'application/json',
|
||||
// },
|
||||
// body: JSON.stringify({
|
||||
// app_id: appId,
|
||||
// user_id: userId,
|
||||
// }),
|
||||
// })
|
||||
// .then(response => response.json())
|
||||
// .then(data => {
|
||||
// console.log('Success:', data);
|
||||
// // Save to local storage
|
||||
// localStorage.setItem('app_id', appId);
|
||||
// localStorage.setItem('user_id', userId);
|
||||
// })
|
||||
// .catch((error) => {
|
||||
// console.error('Error:', error);
|
||||
// });
|
||||
// }
|
||||
// },
|
||||
// mounted() {
|
||||
// // Example usage
|
||||
// // this.saveIds('d535cf554ea66fbebfc415ac837a5828', 'e3ff4006-be5f-4e00-bbe1-e49a88b2541a');
|
||||
// },
|
||||
}
|
||||
</script>
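If the commented-out saveIds() above is ever re-enabled, a hedged sketch of the same call using the VITE_BACKEND_URL environment variable (as the other components in this change already do) instead of the hardcoded http://127.0.0.1:5000; the endpoint and localStorage keys are taken from the commented code.

    const apiURL = import.meta.env.VITE_BACKEND_URL;

    // Illustrative only: env-based variant of the commented-out saveIds().
    async function saveIds(appId, userId) {
      const response = await fetch(`${apiURL}/save_ids`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ app_id: appId, user_id: userId }),
      });
      const data = await response.json();
      // Mirror the commented code: keep the ids locally for later requests.
      localStorage.setItem('app_id', appId);
      localStorage.setItem('user_id', userId);
      return data;
    }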
|
||||
|
||||
<template>
|
||||
|
||||
<div class="container">
|
||||
<div class="row p-4 text-center">
|
||||
<div class="col col-12">
|
||||
<h1 class="display-2">Welcome to <span style="color: var(--main-color);">NebulOus</span></h1>
|
||||
<h1 class="display-2">Welcome to <span style="color: var(--main-color);">Cloud Fog Service Broker</span></h1>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!--
|
||||
<div class="row align-items-center">
|
||||
<div class="col col-12 col-md-6 col-lg-6">
|
||||
<p class="lead">Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book.</p>
|
||||
@ -30,7 +57,7 @@ export default {
|
||||
<p class="lead">Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
-->
|
||||
<div class="spacer-sm"></div>
|
||||
|
||||
<div class="row text-center p-4 bg-row border-radius-sm">
|
||||
@ -43,7 +70,7 @@ export default {
|
||||
|
||||
<div class="row p-4 text-center">
|
||||
<div class="col col-12">
|
||||
<h2 class="display-4">How does it work</h2>
|
||||
<h2 class="display-4">Architecture</h2>
|
||||
</div>
|
||||
<div class="col col-12">
|
||||
<img src="/images/Broker.png" class="img-fluid border-radius-md" alt="...">
|
||||
@ -54,9 +81,16 @@ export default {
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.bg-row {
|
||||
background-color: var(--secondary-color);
|
||||
button-primary:hover{
|
||||
border: 2px #172135;
|
||||
}
|
||||
.bg-row {
|
||||
background-color: #e9ebed;
|
||||
}
|
||||
.row{text-align: justify;
|
||||
}
|
||||
.img-fluid {
|
||||
max-width: 75%;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
</style>
|
@ -1,13 +1,16 @@
|
||||
<template>
|
||||
<div class="results-container">
|
||||
<h2>Evaluation Results</h2>
|
||||
<p class="description">
|
||||
The scores have been rounded to the nearest two decimal places.
|
||||
</p>
|
||||
<div v-if="loading" class="loading">Loading...</div>
|
||||
<div v-else>
|
||||
<!-- Table for displaying the results -->
|
||||
<table v-if="results.length > 0">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Fog Node</th>
|
||||
<th>Node</th>
|
||||
<th>Score (%)</th>
|
||||
<th>Ranking</th>
|
||||
</tr>
|
||||
@ -20,6 +23,8 @@
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<!-- Separator Line -->
|
||||
<div class="separator-line"></div>
|
||||
<!-- Chart Container -->
|
||||
<div class="charts-container">
|
||||
<div class="chart-wrapper">
|
||||
@ -30,51 +35,136 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Separator Line -->
|
||||
<div class="separator-line"></div>
|
||||
<div class="button-container">
|
||||
<button @click="goBackToWR">Add/Modify Weight Restrictions</button>
|
||||
<button @click="saveProjectResults">Save Project</button>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Chart from 'chart.js/auto';
|
||||
<script>
|
||||
export const backendURL = import.meta.env.VITE_BACKEND_URL;
|
||||
const apiURL = backendURL;
|
||||
import Chart from 'chart.js/auto';
|
||||
|
||||
export default {
|
||||
export default {
|
||||
data() {
|
||||
return {
|
||||
results: [],
|
||||
loading: true,
|
||||
deaScoresChart: null,
|
||||
ranksChart: null,
|
||||
gridData: null,
|
||||
relativeWRData: null,
|
||||
immediateWRData: null
|
||||
};
|
||||
},
|
||||
mounted() {
|
||||
this.fetchResults();
|
||||
const resultsString = localStorage.getItem('evaluationResults');
|
||||
|
||||
try {
|
||||
const data = JSON.parse(resultsString);
|
||||
if (data && data.results) {
|
||||
this.results = data.results;
|
||||
this.createCharts();
|
||||
this.loading = false;
|
||||
} else {
|
||||
console.error('Error fetching results: Data is not in the expected format.');
|
||||
this.loading = false;
|
||||
// Handle the error by navigating to a different page or displaying an error message
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error parsing JSON:', error);
|
||||
// Handle parsing error by navigating to a different page or displaying an error message
|
||||
this.loading = false;
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
goBackToWR() {
|
||||
// Make sure 'WR' matches the name of the route in your router configuration
|
||||
this.$router.push({ name: 'WR' });
|
||||
},
|
||||
saveProjectResults() {
|
||||
// For now, this method is a placeholder
|
||||
async saveProjectResults() {
|
||||
if (confirm("Save Project?")) {
|
||||
console.log('Save Project Results button clicked');
|
||||
let array_data = []
|
||||
// Application Id
|
||||
let app_id = localStorage.getItem('fog_broker_app_id');
|
||||
let appData = [
|
||||
{app_id: app_id}
|
||||
];
|
||||
array_data.push(appData);
|
||||
|
||||
// Node Names
|
||||
let NodeNamesFromStorage = localStorage.getItem('NodeNames');
|
||||
let NodeNames = JSON.parse(NodeNamesFromStorage);
|
||||
array_data.push(NodeNames);
|
||||
|
||||
// Selected Criteria
|
||||
let selectedCriteriaFromStorage = localStorage.getItem('selectedCriteria');
|
||||
let selectedCriteria = JSON.parse(selectedCriteriaFromStorage);
|
||||
array_data.push(selectedCriteria);
|
||||
|
||||
// DataGrid Data
|
||||
let gridDataFromStorage = localStorage.getItem('gridData');
|
||||
let GridData = JSON.parse(gridDataFromStorage);
|
||||
array_data.push(GridData);
|
||||
// relativeWRData
|
||||
let relativeWRDataFromStorage = localStorage.getItem('relativeWRData');
|
||||
let relativeWRData = JSON.parse(relativeWRDataFromStorage);
|
||||
array_data.push(relativeWRData);
|
||||
//immediateWRData
|
||||
let immediateWRDataFromStorage = localStorage.getItem('immediateWRData');
|
||||
let immediateWRData = JSON.parse(immediateWRDataFromStorage);
|
||||
array_data.push(immediateWRData);
|
||||
// evaluation Results
|
||||
let evaluationResultsFromStorage = localStorage.getItem('evaluationResults');
|
||||
let evaluationResults = JSON.parse(evaluationResultsFromStorage);
|
||||
array_data.push(evaluationResults.results); // Save only the results, not the LPStatus
|
||||
|
||||
let result = await this.saveProjectData(array_data);
|
||||
console.log(result);
|
||||
|
||||
// Clear local storage
|
||||
localStorage.removeItem('evaluationResults');
|
||||
localStorage.removeItem('selectedCriteria');
|
||||
localStorage.removeItem('NodeNames');
|
||||
localStorage.removeItem('gridData');
|
||||
localStorage.removeItem('relativeWRData');
|
||||
localStorage.removeItem('immediateWRData');
|
||||
// localStorage.removeItem('fog_broker_user_uuid'); May keep them so the user can evaluate again
|
||||
// localStorage.removeItem('fog_broker_app_id');
|
||||
|
||||
// Redirect to the Home page
|
||||
this.$router.push({ name: 'HomePage' });
|
||||
}
|
||||
else {
|
||||
console.log('Project not saved.');
|
||||
}
|
||||
},
|
||||
fetchResults() {
|
||||
fetch('http://127.0.0.1:5000/get-evaluation-results')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
this.results = data;
|
||||
this.loading = false;
|
||||
this.createCharts();
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error fetching results:', error);
|
||||
this.loading = false;
|
||||
async saveProjectData(data) {
|
||||
try {
|
||||
const response = await fetch(apiURL+'/app/save', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(data)
|
||||
});
|
||||
console.log("response" + response);
|
||||
let response_data = await response.json();
|
||||
console.log(response_data);
|
||||
} catch (error) {
|
||||
console.error('Error saving project:', error);
|
||||
}
|
||||
},
|
||||
createCharts() {
|
||||
if (!this.results || this.results.length === 0) {
|
||||
console.error('No results data available to create charts.');
|
||||
return;
|
||||
}
|
||||
console.log(this.results);
|
||||
|
||||
const titles = this.results.map(result => result.Title);
|
||||
const deaScores = this.results.map(result => result['DEA Score']);
|
||||
const ranks = this.results.map(result => result.Rank);
|
||||
@ -148,7 +238,7 @@ export default {
|
||||
options: {
|
||||
responsive: true,
|
||||
//maintainAspectRatio: false, // Set to false to allow full width and controlled height
|
||||
indexAxis: 'y', // This will make the bar chart horizontal
|
||||
indexAxis: 'y', // This makes the bar chart horizontal
|
||||
scales: {
|
||||
x: {
|
||||
beginAtZero: true
|
||||
@ -175,33 +265,39 @@ export default {
|
||||
return percentage === '100.00' ? '100%' : `${percentage}%`;
|
||||
}
|
||||
}
|
||||
};
|
||||
</script>
|
||||
};
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.results-container {
|
||||
<style>
|
||||
.results-container {
|
||||
padding: 20px;
|
||||
}
|
||||
}
|
||||
|
||||
.loading {
|
||||
.loading {
|
||||
text-align: center;
|
||||
}
|
||||
}
|
||||
|
||||
.charts-container {
|
||||
.charts-container {
|
||||
display: flex;
|
||||
flex-direction: row; /* Align charts horizontally */
|
||||
justify-content: space-around;
|
||||
padding: 0 20px; /* Add padding if needed */
|
||||
}
|
||||
}
|
||||
|
||||
.chart-wrapper {
|
||||
.chart-wrapper {
|
||||
flex: 1; /* Each chart will take equal space */
|
||||
/* Remove max-width or set it to a higher value if you want a specific limit */
|
||||
margin: auto;
|
||||
}
|
||||
}
|
||||
|
||||
td {
|
||||
td {
|
||||
text-align: center;
|
||||
}
|
||||
}
|
||||
|
||||
</style>
|
||||
.separator-line {
|
||||
height: 4px; /* Thickness of the line */
|
||||
background-color: #172135; /* Deep purple color */
|
||||
margin: 10px 0; /* Spacing above and below the line */
|
||||
}
|
||||
|
||||
</style>
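A sketch of the positional layout of the array_data payload that saveProjectResults() posts to {VITE_BACKEND_URL}/app/save; the order comes from the code above, every value below is a placeholder, and it is assumed (not shown here) that the backend unpacks the array by index.

    // Illustrative shape only; all values are placeholders.
    const array_data_example = [
      [{ app_id: '<fog_broker_app_id from localStorage>' }],                   // 0: application id
      ['<node-uuid-1>', '<node-uuid-2>'],                                      // 1: NodeNames
      [{ name: '<criterion-name>', type: 1, title: '<criterion-title>' }],     // 2: selectedCriteria
      { /* gridData as stored by DataGrid.vue */ },                            // 3: grid data
      [{ LHSCriterion: '<A>', Operator: 1, Intense: 2, RHSCriterion: '<B>' }], // 4: relativeWRData
      [{ Criterion: '<A>', Operator: 1, Value: 0.5 }],                         // 5: immediateWRData
      [{ Title: '<node-title>', 'DEA Score': 0.97, Rank: 1 }],                 // 6: evaluationResults.results
    ];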
|
||||
|
@ -1,6 +1,12 @@
|
||||
<template>
|
||||
<div class="wr-container">
|
||||
<div v-for="(condition, index) in conditions" :key="index" class="condition-row">
|
||||
<!-- Relative constraints section -->
|
||||
<div class="relative-constraints">
|
||||
<h2>Relative Constraints</h2>
|
||||
<p class="description">
|
||||
Set relative constraints between the criteria. For example, "Weight of Criterion A >= 2* Weight of Criterion B".
|
||||
</p>
|
||||
<div v-for="(condition, index) in relativeConditions" :key="index" class="condition-row">
|
||||
<select v-model="condition.column1" @change="updateDropdowns(index)">
|
||||
<option value="" disabled>Select Criterion</option>
|
||||
<option v-for="col in availableColumns(index, 1)" :key="`1-${col}`" :value="col">{{ col }}</option>
|
||||
@ -11,87 +17,161 @@
|
||||
<option v-for="op in operators" :key="op" :value="op">{{ op }}</option>
|
||||
</select>
|
||||
|
||||
<input type="number" v-model.number="condition.value" :min="0" placeholder="Value" />
|
||||
<!-- <select v-model="condition.operator">-->
|
||||
<!-- <option value="" disabled>Select Operator</option>-->
|
||||
<!-- <option v-for="(value, key) in operatorMapping" :key="key" :value="value">{{ key }}</option>-->
|
||||
<!-- </select>-->
|
||||
|
||||
<input type="number" v-model.number="condition.value" :min="0" step="0.5" placeholder="Value" />
|
||||
|
||||
<select v-model="condition.column2" @change="updateDropdowns(index)">
|
||||
<option value="" disabled>Select Criterion</option>
|
||||
<option v-for="col in availableColumns(index, 2)" :key="`2-${col}`" :value="col">{{ col }}</option>
|
||||
</select>
|
||||
|
||||
<button @click="removeCondition(index)">-</button>
|
||||
</div>
|
||||
<button @click="addCondition">+ Add Relative Constraint</button>
|
||||
</div>
|
||||
<!-- Separator Line -->
|
||||
<div class="separator-line"></div>
|
||||
<div class="immediate-constraints">
|
||||
<h2>Immediate Constraints</h2>
|
||||
<p class="description">
|
||||
Set immediate constraints on individual criteria. For example, "Weight of Criterion A >= 0.25".
|
||||
</p>
|
||||
<div v-for="(immediateCondition, index) in immediateConditions" :key="`immediate-${index}`" class="condition-row">
|
||||
<select v-model="immediateCondition.criterion">
|
||||
<option value="" disabled>Select Criterion</option>
|
||||
<option v-for="col in criteria_titles" :key="`immediate-${col}`" :value="col">{{ col }}</option>
|
||||
</select>
|
||||
|
||||
<button @click="addCondition">+</button>
|
||||
<!-- <button @click.prevent="sendWRData">Run Evaluation</button> -->
|
||||
<button @click="sendWRData">Run Evaluation</button>
|
||||
<select v-model="immediateCondition.operator">
|
||||
<option value="" disabled>Select Operator</option>
|
||||
<option v-for="op in operators" :key="op" :value="op">{{ op }}</option>
|
||||
</select>
|
||||
|
||||
<!-- <select v-model="immediateCondition.operator">-->
|
||||
<!-- <option value="" disabled>Select Operator</option>-->
|
||||
<!-- <option v-for="(value, key) in operatorMapping" :key="key" :value="value">{{ key }}</option>-->
|
||||
<!-- </select>-->
|
||||
|
||||
<input type="number" v-model.number="immediateCondition.value" :min="0" step="0.1" placeholder="Value" />
|
||||
|
||||
<button @click="removeImmediateCondition(index)">-</button>
|
||||
</div>
|
||||
|
||||
<button @click="addImmediateCondition">+ Add Immediate Constraint</button>
|
||||
</div>
|
||||
<!-- Separator Line -->
|
||||
<div class="separator-line"></div>
|
||||
<div class="pt-4"></div>
|
||||
<div class="button-container">
|
||||
<button @click="goBackToCriteriaSelection" class="bg-color-primary">Back to Criteria Selection</button>
|
||||
<button @click="sendWRData" class="bg-color-primary">Run Evaluation</button>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import { useRouter } from 'vue-router';
|
||||
export const backendURL = import.meta.env.VITE_BACKEND_URL;
|
||||
const apiURL = backendURL;
|
||||
export default {
|
||||
data() {
|
||||
return {
|
||||
receivedGridData: null,
|
||||
conditions: [{ column1: '', operator: '', value: 0, column2: '' }],
|
||||
relativeConditions: [{ column1: '', operator: '', value: 0, column2: '' }],
|
||||
criteria_titles: [], // This is populated with the column titles
|
||||
operators: ['>=', '=', '<='],
|
||||
immediateConditions: [{ criterion: '', operator: '', value: 0 }],
|
||||
operatorMapping: {
|
||||
'>=': 1,
|
||||
'=': 0,
|
||||
'<=': -1
|
||||
},
|
||||
errorMessage: '', // Add this line
|
||||
};
|
||||
},
|
||||
mounted() {
|
||||
// Prioritize data from route parameters
|
||||
if (this.$route.params.data) {
|
||||
// Parse the JSON string back into an object
|
||||
this.receivedGridData = JSON.parse(this.$route.params.data);
|
||||
} else {
|
||||
// Fallback to localStorage if route params are not available
|
||||
const gridDataFromStorage = localStorage.getItem('gridData');
|
||||
if (gridDataFromStorage) {
|
||||
this.receivedGridData = JSON.parse(gridDataFromStorage);
|
||||
}
|
||||
}
|
||||
|
||||
// Continue with other localStorage checks
|
||||
const wrDataFromStorage = localStorage.getItem('wrData');
|
||||
const immediateWRDataFromStorage = localStorage.getItem('immediateWRData');
|
||||
|
||||
if (wrDataFromStorage) {
|
||||
this.wrData = JSON.parse(wrDataFromStorage);
|
||||
}
|
||||
|
||||
if (immediateWRDataFromStorage) {
|
||||
this.immediateConditions = JSON.parse(immediateWRDataFromStorage);
|
||||
} else {
|
||||
this.immediateConditions = [{ criterion: '', operator: '', value: 0 }];
|
||||
}
|
||||
|
||||
// Retrieve selectedCriteria from local storage
|
||||
const selectedCriteriaJson = localStorage.getItem('selectedCriteria');
|
||||
if (selectedCriteriaJson) {
|
||||
try {
|
||||
const selectedCriteria = JSON.parse(selectedCriteriaJson);
|
||||
// Use selectedCriteria to populate criteria_titles and filter out boolean criteria (type 5)
|
||||
this.criteria_titles = selectedCriteria
|
||||
.filter(info => info.type !== 5)
|
||||
.map(info => info.title);
|
||||
} catch (e) {
|
||||
console.error('Error parsing selected criteria information:', e);
|
||||
this.$router.push({ name: 'CriteriaSelection' });
|
||||
}
|
||||
} else {
|
||||
console.error('Error: Selected criteria information not found in local storage.');
|
||||
this.$router.push({ name: 'CriteriaSelection' });
|
||||
}
|
||||
console.log('WR.vue Received gridData:', this.receivedGridData);
|
||||
this.fetchCriteriaTitles();
|
||||
},
|
||||
methods: {
|
||||
fetchCriteriaTitles() {
|
||||
fetch('http://127.0.0.1:5000/get-criteria-titles')
|
||||
.then(response => {
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
return response.json();
|
||||
})
|
||||
.then(data => {
|
||||
this.criteria_titles = data;
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error fetching criteria titles:', error);
|
||||
});
|
||||
},
|
||||
addCondition() {
|
||||
this.conditions.push({column1: '', column2: '', operator: '', value: 0});
|
||||
this.relativeConditions.push({column1: '', column2: '', operator: '', value: 0});
|
||||
},
|
||||
removeCondition(index) {
|
||||
this.conditions.splice(index, 1);
|
||||
this.relativeConditions.splice(index, 1);
|
||||
},
|
||||
validateForm() {
|
||||
for (const condition of this.conditions) {
|
||||
for (const condition of this.relativeConditions) {
|
||||
if (!condition.column1 || !condition.column2) {
|
||||
alert('Please select criteria for each relative constraint.');
|
||||
return false;
|
||||
}
|
||||
if (!condition.operator) {
|
||||
alert('Please select an operator for each condition.');
|
||||
alert('Please select an operator for each relative constraint.');
|
||||
return false;
|
||||
}
|
||||
if (condition.value === null || condition.value === '') {
|
||||
alert('Please enter a numeric value for each.');
|
||||
alert('Please enter a numeric value for each relative constraint.');
|
||||
return false;
|
||||
}
|
||||
if (condition.value < 0) {
|
||||
alert('Values cannot be less than zero.');
|
||||
if (condition.value <= 0) {
|
||||
alert('The priority in each relative constraint must be greater than zero.');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
const uniquePairs = new Set(
|
||||
this.conditions.map(c => [c.column1, c.column2].sort().join('-'))
|
||||
this.relativeConditions.map(c => [c.column1, c.column2].sort().join('-'))
|
||||
);
|
||||
|
||||
if (uniquePairs.size !== this.conditions.length) {
|
||||
if (uniquePairs.size !== this.relativeConditions.length) {
|
||||
alert('Each pair of criteria can only be used once in a restriction!');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
updateDropdowns(index) {
|
||||
@ -100,15 +180,73 @@ export default {
|
||||
availableColumns(index, dropdownNumber) {
|
||||
if (dropdownNumber === 1) {
|
||||
// For the first dropdown, filter out the column selected in the second dropdown
|
||||
return this.criteria_titles.filter(col => col !== this.conditions[index].column2);
|
||||
return this.criteria_titles.filter(col => col !== this.relativeConditions[index].column2);
|
||||
} else {
|
||||
// For the second dropdown, filter out the column selected in the first dropdown
|
||||
return this.criteria_titles.filter(col => col !== this.conditions[index].column1);
|
||||
return this.criteria_titles.filter(col => col !== this.relativeConditions[index].column1);
|
||||
}
|
||||
},
|
||||
// Add a method to validate the Immediate Constraints
|
||||
validateImmediateConstraints() {
|
||||
let criterionConstraints = {};
|
||||
|
||||
// Iterate over immediate conditions and organize them by criterion
|
||||
for (const condition of this.immediateConditions) {
|
||||
if (!condition.criterion || !condition.operator) {
|
||||
continue; // Skip empty conditions
|
||||
}
|
||||
|
||||
// Ensure value is greater than 0
|
||||
if (condition.value === null || condition.value === '' || condition.value <= 0) {
|
||||
alert(`The importance of criterion "${condition.criterion}" should be greater than 0.`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Initialize the constraints list for the criterion if not already done
|
||||
if (!criterionConstraints[condition.criterion]) {
|
||||
criterionConstraints[condition.criterion] = {};
|
||||
}
|
||||
|
||||
// Check for duplicate operators
|
||||
if (criterionConstraints[condition.criterion][condition.operator]) {
|
||||
alert(`You cannot use the same operator more than once for the criterion "${condition.criterion}".`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Add the condition to the list for the criterion
|
||||
criterionConstraints[condition.criterion][condition.operator] = condition.value;
|
||||
}
|
||||
|
||||
// Iterate over the constraints for each criterion and apply validation rules
|
||||
for (const [criterion, operators] of Object.entries(criterionConstraints)) {
|
||||
// Only one constraint allowed when using '=' operator
|
||||
if (operators['='] !== undefined && Object.keys(operators).length > 1) {
|
||||
alert(`Only one constraint allowed for '${criterion}' when using '=' operator.`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate logical consistency between '>=' and '<=' values
|
||||
if (operators['>='] !== undefined && operators['<='] !== undefined) {
|
||||
const greaterThanOrEqualValue = parseFloat(operators['>=']);
|
||||
const lessThanOrEqualValue = parseFloat(operators['<=']);
|
||||
|
||||
if (isNaN(greaterThanOrEqualValue) || isNaN(lessThanOrEqualValue)) {
|
||||
alert(`Invalid numeric values for the criterion "${criterion}".`);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (greaterThanOrEqualValue > lessThanOrEqualValue) {
|
||||
alert(`For the criterion "${criterion}", the value for '>=' must be less than or equal to the value for '<='.`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
validateNonInvertedConditions() {
|
||||
let isValid = true;
|
||||
let conditionPairs = this.conditions.map(c => [c.column1, c.column2].sort().join('-'));
|
||||
let conditionPairs = this.relativeConditions.map(c => [c.column1, c.column2].sort().join('-'));
|
||||
|
||||
// Create a Set for unique pairs
|
||||
const uniquePairs = new Set(conditionPairs);
|
||||
@ -120,94 +258,122 @@ export default {
|
||||
|
||||
return isValid;
|
||||
},
|
||||
addImmediateCondition() {
|
||||
this.immediateConditions.push({ criterion: '', operator: '', value: 0 });
|
||||
},
|
||||
removeImmediateCondition(index) {
|
||||
this.immediateConditions.splice(index, 1);
|
||||
},
|
||||
async sendWRData() {
|
||||
// Check if any condition is set
|
||||
const isAnyConditionSet = this.conditions.some(condition => condition.column1 && condition.column2 && condition.operator);
|
||||
// Check if any relative or immediate condition is set
|
||||
const isAnyRelativeConditionSet = this.relativeConditions.some(condition => condition.column1 && condition.column2 && condition.operator);
|
||||
const isAnyImmediateConditionSet = this.immediateConditions.some(condition => condition.criterion && condition.operator);
|
||||
|
||||
// If no conditions are set, prompt the user
|
||||
if (!isAnyConditionSet) {
|
||||
// Filter out incomplete or empty relative constraints
|
||||
const validRelativeConditions = this.relativeConditions.filter(condition => condition.column1 && condition.column2 && condition.operator);
|
||||
// Filter out incomplete or empty immediate constraints
|
||||
const validImmediateConditions = this.immediateConditions.filter(condition => condition.criterion && condition.operator);
|
||||
|
||||
// Prompt the user if no conditions are set
|
||||
if (!isAnyRelativeConditionSet && !isAnyImmediateConditionSet) {
|
||||
const proceedWithoutWR = confirm("Would you like to proceed without imposing Weight Restrictions?");
|
||||
if (!proceedWithoutWR) {
|
||||
// User chose 'No', do nothing to stay on the current page
|
||||
return;
|
||||
return; // User chose 'No', do nothing
|
||||
}
|
||||
// User chose 'Yes', proceed with sending data
|
||||
// Clear data if user chose 'Yes'
|
||||
this.relativeConditions = [];
|
||||
this.immediateConditions = [];
|
||||
} else {
|
||||
// Validate the form only if there are conditions set
|
||||
if (!this.validateForm() || !this.validateNonInvertedConditions()) {
|
||||
alert('Invalid Weight Restrictions, each pair of criteria can be used only once!');
|
||||
// Validate conditions
|
||||
if ((isAnyRelativeConditionSet && (!this.validateForm() || !this.validateNonInvertedConditions())) ||
|
||||
(isAnyImmediateConditionSet && (!this.validateImmediateConstraints()))) {
|
||||
return; // Stop if validation fails
|
||||
}
|
||||
}
|
||||
|
||||
const operatorMapping = {
|
||||
'<=': -1,
|
||||
'=': 0,
|
||||
'>=': 1
|
||||
};
|
||||
|
||||
const processedWRData = this.conditions.map(condition => {
|
||||
return {
|
||||
// Process Relative constraints
|
||||
const relativeWRData = validRelativeConditions.map(condition => ({
|
||||
LHSCriterion: condition.column1,
|
||||
Operator: operatorMapping[condition.operator],
|
||||
Operator: this.operatorMapping[condition.operator],
|
||||
Intense: condition.value,
|
||||
RHSCriterion: condition.column2
|
||||
};
|
||||
});
|
||||
}));
|
||||
|
||||
// Process Immediate constraints
|
||||
const immediateWRData = validImmediateConditions.map(condition => ({
|
||||
Criterion: condition.criterion,
|
||||
Operator: this.operatorMapping[condition.operator],
|
||||
Value: condition.value
|
||||
}));
|
||||
|
||||
// Retrieve node names from local storage
|
||||
let nodeNamesArray = [];
|
||||
const NodeNamesString = localStorage.getItem('NodeNames');
|
||||
if (NodeNamesString) {
|
||||
nodeNamesArray = JSON.parse(NodeNamesString);
|
||||
}
|
||||
|
||||
// Prepare payload with filtered conditions
|
||||
const payload = {
|
||||
gridData: this.receivedGridData, // Data received from DataGrid.vue
|
||||
wrData: processedWRData
|
||||
gridData: this.receivedGridData,
|
||||
relativeWRData: relativeWRData,
|
||||
immediateWRData: immediateWRData,
|
||||
nodeNames: nodeNamesArray
|
||||
};
|
||||
console.log('Payload being sent to backend from WR.vue:', payload);
|
||||
|
||||
// Ask the backend to perform evaluation
|
||||
try {
|
||||
const response = await fetch('http://127.0.0.1:5000/process-evaluation-data', {
|
||||
const response = await fetch(apiURL+'/process-evaluation-data', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(payload)
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
// If the HTTP response is not OK, throw an error
|
||||
throw new Error('Network response was not ok');
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log('Response from backend:', data);
|
||||
console.log('Response from backend process-evaluation-data():', data);
|
||||
console.log('Response data.results.LPstatus:', data.results.LPstatus);
|
||||
|
||||
// Check if the response was successful
|
||||
if (response.ok && data.status === 'success') {
|
||||
// Redirect to Results.vue
|
||||
this.$router.push({ name: 'Results' });
|
||||
} /*else {
|
||||
// Handle error
|
||||
console.error('Error in response:', data.message);
|
||||
alert('Failed to process data: ' + data.message);
|
||||
} */
|
||||
} catch (error) {
|
||||
console.error('Error sending data to backend:', error);
|
||||
alert('Failed to send data to backend.');
|
||||
// First, check the general status of the response to confirm the request was processed successfully
|
||||
// Check the LP problem's feasibility status
|
||||
if (data.status === 'success') {
|
||||
if (data.results.LPstatus === 'feasible') {
|
||||
localStorage.setItem('evaluationResults', JSON.stringify(data.results));
|
||||
localStorage.setItem('relativeWRData', JSON.stringify(relativeWRData));
|
||||
localStorage.setItem('immediateWRData', JSON.stringify(immediateWRData));
|
||||
|
||||
// Navigate to Results.vue
|
||||
this.$router.push({ name: 'Results', params: { evaluationResults: data.results.results } });
|
||||
} else if (data.results.LPstatus === 'infeasible') {
|
||||
// Set the error message for infeasible LP solution
|
||||
this.errorMessage = data.results.message; // Accessing the message directly
|
||||
alert(this.errorMessage); // Show the message to the user via alert
|
||||
}
|
||||
} else {
|
||||
// Handle other unexpected 'status'
|
||||
this.errorMessage = 'An unexpected error occurred.';
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error:', error);
|
||||
this.errorMessage = error.message || 'Failed to send data to backend.';
|
||||
}
|
||||
|
||||
},
|
||||
sendDataToBackend(payload) {
|
||||
console.log('Sending payload to backend:', payload);
|
||||
fetch('http://127.0.0.1:5000/process-evaluation-data', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(payload)
|
||||
})
|
||||
.then(response => {
|
||||
console.log('Raw response:', response);
|
||||
return response.json();
|
||||
})
|
||||
.then(data => {
|
||||
console.log('Response from backend:', data);
|
||||
// Handle the response from the backend
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error sending data to backend:', error);
|
||||
});
|
||||
goBackToCriteriaSelection() {
|
||||
this.$router.push({ name: 'CriteriaSelection' });
|
||||
}
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
input{height: 40px;}
|
||||
|
||||
.wr-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
@ -218,6 +384,7 @@ export default {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
button {
|
||||
@ -233,7 +400,7 @@ button {
|
||||
button:hover {
|
||||
background-color: var(--secondary-color); /* Lighter shade of purple on hover */
|
||||
color: var(--main-color);
|
||||
border:2px;
|
||||
border-color:var(--main-color);
|
||||
border: 2px;
|
||||
border-color: var(--main-color);
|
||||
}
|
||||
</style>
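For clarity, a sketch of the payload that sendWRData() posts to {VITE_BACKEND_URL}/process-evaluation-data. The keys and the operator encoding ('<=' -> -1, '=' -> 0, '>=' -> 1) come from the code above; the concrete values are illustrative only.

    // Illustrative payload; matches the keys built in sendWRData().
    const examplePayload = {
      gridData: { /* values entered in DataGrid.vue */ },
      relativeWRData: [
        // "Weight of Criterion A >= 2 * Weight of Criterion B"
        { LHSCriterion: 'Criterion A', Operator: 1, Intense: 2, RHSCriterion: 'Criterion B' }
      ],
      immediateWRData: [
        // "Weight of Criterion A >= 0.25"
        { Criterion: 'Criterion A', Operator: 1, Value: 0.25 }
      ],
      nodeNames: ['<node-uuid-1>', '<node-uuid-2>']
    };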
|
cfsb-frontend/src/components/WR_AllCriteria.vue (new file, 357 lines)
@ -0,0 +1,357 @@
|
||||
<template>
|
||||
<div class="wr-container">
|
||||
<!-- Relative constraints section -->
|
||||
<div class="relative-constraints">
|
||||
<h3>Relative Constraints</h3>
|
||||
<p class="description">
|
||||
Set relative constraints between the criteria. For example, "Weight of Criterion A >= 2* Weight of Criterion B".
|
||||
</p>
|
||||
<div v-for="(condition, index) in relativeConditions" :key="index" class="condition-row">
|
||||
<select v-model="condition.column1" @change="updateDropdowns(index)">
|
||||
<option value="" disabled>Select Criterion</option>
|
||||
<option v-for="col in availableColumns(index, 1)" :key="`1-${col}`" :value="col">{{ col }}</option>
|
||||
</select>
|
||||
|
||||
<select v-model="condition.operator">
|
||||
<option value="" disabled>Select Operator</option>
|
||||
<option v-for="op in operators" :key="op" :value="op">{{ op }}</option>
|
||||
</select>
|
||||
|
||||
<input type="number" v-model.number="condition.value" :min="0" step="0.5" placeholder="Value" />
|
||||
|
||||
<select v-model="condition.column2" @change="updateDropdowns(index)">
|
||||
<option value="" disabled>Select Criterion</option>
|
||||
<option v-for="col in availableColumns(index, 2)" :key="`2-${col}`" :value="col">{{ col }}</option>
|
||||
</select>
|
||||
<button @click="removeCondition(index)">-</button>
|
||||
</div>
|
||||
<button @click="addCondition">+ Add Relative Constraint</button>
|
||||
</div>
|
||||
<div class="immediate-constraints">
|
||||
<h3>Immediate Constraints</h3>
|
||||
<p class="description">
|
||||
Set immediate constraints on individual criteria. For example, "Weight of Criterion A >= 0.25".
|
||||
</p>
|
||||
<div v-for="(immediateCondition, index) in immediateConditions" :key="`immediate-${index}`" class="condition-row">
|
||||
<select v-model="immediateCondition.criterion">
|
||||
<option value="" disabled>Select Criterion</option>
|
||||
<option v-for="col in criteria_titles" :key="`immediate-${col}`" :value="col">{{ col }}</option>
|
||||
</select>
|
||||
|
||||
<select v-model="immediateCondition.operator">
|
||||
<option value="" disabled>Select Operator</option>
|
||||
<option v-for="op in operators" :key="op" :value="op">{{ op }}</option>
|
||||
</select>
|
||||
|
||||
<input type="number" v-model.number="immediateCondition.value" :min="0" step="0.1" placeholder="Value" />
|
||||
|
||||
<button @click="removeImmediateCondition(index)">-</button>
|
||||
</div>
|
||||
|
||||
<button @click="addImmediateCondition">+ Add Immediate Constraint</button>
|
||||
</div>
|
||||
<button @click="sendWRData">Run Evaluation</button>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export const backendURL = import.meta.env.VITE_BACKEND_URL;
|
||||
const apiURL = backendURL;
|
||||
import {useRouter} from 'vue-router';
|
||||
|
||||
export default {
|
||||
data() {
|
||||
return {
|
||||
receivedGridData: null,
|
||||
relativeConditions: [{column1: '', operator: '', value: 0, column2: ''}],
|
||||
criteria_titles: [], // This is populated with the column titles
|
||||
operators: ['>=', '=', '<='],
|
||||
immediateConditions: [{criterion: '', operator: '', value: 0}],
|
||||
operatorMapping: {
|
||||
'<=': -1,
|
||||
'=': 0,
|
||||
'>=': 1
|
||||
},
|
||||
};
|
||||
},
|
||||
mounted() {
|
||||
if (this.$route.params.data) {
|
||||
// Parse the JSON string back into an object
|
||||
this.receivedGridData = JSON.parse(this.$route.params.data);
|
||||
}
|
||||
|
||||
const gridDataFromStorage = localStorage.getItem('gridData');
|
||||
const wrDataFromStorage = localStorage.getItem('wrData');
|
||||
const immediateWRDataFromStorage = localStorage.getItem('immediateWRData');
|
||||
|
||||
if (gridDataFromStorage) {
|
||||
this.receivedGridData = JSON.parse(gridDataFromStorage);
|
||||
}
|
||||
|
||||
if (wrDataFromStorage) {
|
||||
this.wrData = JSON.parse(wrDataFromStorage);
|
||||
}
|
||||
|
||||
if (immediateWRDataFromStorage) {
|
||||
this.immediateConditions = JSON.parse(immediateWRDataFromStorage);
|
||||
} else {
|
||||
// Reset immediateConditions if there is no stored data
|
||||
this.immediateConditions = [{criterion: '', operator: '', value: 0}];
|
||||
}
|
||||
|
||||
this.fetchCriteriaTitles();
|
||||
|
||||
},
|
||||
methods: {
|
||||
fetchCriteriaTitles() {
|
||||
fetch(apiURL+'/get-criteria-titles')
|
||||
.then(response => {
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
return response.json();
|
||||
})
|
||||
.then(data => {
|
||||
this.criteria_titles = data;
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error fetching criteria titles:', error);
|
||||
});
|
||||
},
|
||||
addCondition() {
|
||||
this.relativeConditions.push({column1: '', column2: '', operator: '', value: 0});
|
||||
},
|
||||
removeCondition(index) {
|
||||
this.relativeConditions.splice(index, 1);
|
||||
},
|
||||
validateForm() {
|
||||
for (const condition of this.relativeConditions) {
|
||||
if (!condition.column1 || !condition.column2) {
|
||||
alert('Please select criteria for each relative constraint.');
|
||||
return false;
|
||||
}
|
||||
if (!condition.operator) {
|
||||
alert('Please select an operator for each relative constraint.');
|
||||
return false;
|
||||
}
|
||||
if (condition.value === null || condition.value === '') {
|
||||
alert('Please enter a numeric value for each relative constraint.');
|
||||
return false;
|
||||
}
|
||||
if (condition.value <= 0) {
|
||||
alert('The priority in each relative constraint must be greater than zero.');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
const uniquePairs = new Set(
|
||||
this.relativeConditions.map(c => [c.column1, c.column2].sort().join('-'))
|
||||
);
|
||||
|
||||
if (uniquePairs.size !== this.relativeConditions.length) {
|
||||
alert('Each pair of criteria can only be used once in a restriction!');
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
updateDropdowns(index) {
|
||||
// May be used to update dropdown availability
|
||||
},
|
||||
availableColumns(index, dropdownNumber) {
|
||||
if (dropdownNumber === 1) {
|
||||
// For the first dropdown, filter out the column selected in the second dropdown
|
||||
return this.criteria_titles.filter(col => col !== this.relativeConditions[index].column2);
|
||||
} else {
|
||||
// For the second dropdown, filter out the column selected in the first dropdown
|
||||
return this.criteria_titles.filter(col => col !== this.relativeConditions[index].column1);
|
||||
}
|
||||
},
|
||||
// Add a method to validate the Immediate Constraints
|
||||
validateImmediateConstraints() {
|
||||
let criterionConstraints = {};
|
||||
|
||||
// Iterate over immediate conditions and organize them by criterion
|
||||
for (const condition of this.immediateConditions) {
|
||||
if (!condition.criterion || !condition.operator) {
|
||||
continue; // Skip empty conditions
|
||||
}
|
||||
|
||||
// Ensure value is greater than 0
|
||||
if (condition.value === null || condition.value === '' || condition.value <= 0) {
|
||||
alert(`The importance of criterion "${condition.criterion}" should be greater than 0.`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Initialize the constraints list for the criterion if not already done
|
||||
if (!criterionConstraints[condition.criterion]) {
|
||||
criterionConstraints[condition.criterion] = {};
|
||||
}
|
||||
|
||||
// Check for duplicate operators
|
||||
if (criterionConstraints[condition.criterion][condition.operator]) {
|
||||
alert(`You cannot use the same operator more than once for the criterion "${condition.criterion}".`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Add the condition to the list for the criterion
|
||||
criterionConstraints[condition.criterion][condition.operator] = condition.value;
|
||||
}
|
||||
|
||||
// Iterate over the constraints for each criterion and apply validation rules
|
||||
for (const [criterion, operators] of Object.entries(criterionConstraints)) {
|
||||
// Only one constraint allowed when using '=' operator
|
||||
if (operators['='] !== undefined && Object.keys(operators).length > 1) {
|
||||
alert(`Only one constraint allowed for '${criterion}' when using '=' operator.`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate logical consistency between '>=' and '<=' values
|
||||
if (operators['>='] !== undefined && operators['<='] !== undefined) {
|
||||
const greaterThanOrEqualValue = parseFloat(operators['>=']);
|
||||
const lessThanOrEqualValue = parseFloat(operators['<=']);
|
||||
|
||||
if (isNaN(greaterThanOrEqualValue) || isNaN(lessThanOrEqualValue)) {
|
||||
alert(`Invalid numeric values for the criterion "${criterion}".`);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (greaterThanOrEqualValue > lessThanOrEqualValue) {
|
||||
alert(`For the criterion "${criterion}", the value for '>=' must be less than or equal to the value for '<='.`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
validateNonInvertedConditions() {
|
||||
let isValid = true;
|
||||
let conditionPairs = this.relativeConditions.map(c => [c.column1, c.column2].sort().join('-'));
|
||||
|
||||
// Create a Set for unique pairs
|
||||
const uniquePairs = new Set(conditionPairs);
|
||||
|
||||
if (uniquePairs.size !== conditionPairs.length) {
|
||||
// There are duplicates
|
||||
isValid = false;
|
||||
}
|
||||
|
||||
return isValid;
|
||||
},
|
||||
addImmediateCondition() {
|
||||
this.immediateConditions.push({criterion: '', operator: '', value: 0});
|
||||
},
|
||||
removeImmediateCondition(index) {
|
||||
this.immediateConditions.splice(index, 1);
|
||||
},
|
||||
async sendWRData() {
|
||||
const operatorMapping = {'<=': -1, '=': 0, '>=': 1};
|
||||
// Check if any relative or immediate condition is set
|
||||
const isAnyRelativeConditionSet = this.relativeConditions.some(condition => condition.column1 && condition.column2 && condition.operator);
|
||||
const isAnyImmediateConditionSet = this.immediateConditions.some(condition => condition.criterion && condition.operator);
|
||||
|
||||
// Filter out incomplete or empty relative constraints
|
||||
const validRelativeConditions = this.relativeConditions.filter(condition => condition.column1 && condition.column2 && condition.operator);
|
||||
// Filter out incomplete or empty immediate constraints
|
||||
const validImmediateConditions = this.immediateConditions.filter(condition => condition.criterion && condition.operator);
|
||||
|
||||
// Prompt the user if no conditions are set
|
||||
if (!isAnyRelativeConditionSet && !isAnyImmediateConditionSet) {
|
||||
const proceedWithoutWR = confirm("Would you like to proceed without imposing Weight Restrictions?");
|
||||
if (!proceedWithoutWR) {
|
||||
return; // User chose 'No', do nothing
|
||||
}
|
||||
// Clear data if user chose 'Yes'
|
||||
this.relativeConditions = [];
|
||||
this.immediateConditions = [];
|
||||
} else {
|
||||
// Validate conditions
|
||||
if ((isAnyRelativeConditionSet && (!this.validateForm() || !this.validateNonInvertedConditions())) ||
|
||||
(isAnyImmediateConditionSet && (!this.validateImmediateConstraints()))) {
|
||||
return; // Stop if validation fails
|
||||
}
|
||||
}
|
||||
|
||||
// Process Relative constraints
|
||||
const RelativeWRData = validRelativeConditions.map(condition => ({
|
||||
LHSCriterion: condition.column1,
|
||||
Operator: this.operatorMapping[condition.operator],
|
||||
Intense: condition.value,
|
||||
RHSCriterion: condition.column2
|
||||
}));
|
||||
|
||||
// Process Immediate constraints
|
||||
const immediateWRData = validImmediateConditions.map(condition => ({
|
||||
Criterion: condition.criterion,
|
||||
Operator: this.operatorMapping[condition.operator],
|
||||
Value: condition.value
|
||||
}));
|
||||
|
||||
// Prepare payload with filtered conditions
|
||||
const payload = {
|
||||
gridData: this.receivedGridData,
|
||||
wrData: RelativeWRData,
|
||||
immediateWRData: immediateWRData
|
||||
};
|
||||
|
||||
|
||||
try {
|
||||
const response = await fetch(apiURL+'/process-evaluation-data', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(payload)
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
console.log('Response from backend:', data);
|
||||
|
||||
// Check if the response was successful
|
||||
if (response.ok && data.status === 'success') {
|
||||
localStorage.setItem('gridData', JSON.stringify(this.receivedGridData));
|
||||
localStorage.setItem('wrData', JSON.stringify(RelativeWRData));
|
||||
|
||||
this.$router.push({name: 'Results'});
|
||||
} else {
|
||||
console.error('Error in response:', data.message);
|
||||
alert('Failed to process data: ' + data.message);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error sending data to backend:', error);
|
||||
alert('Failed to send data to backend.');
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.wr-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.condition-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
button {
|
||||
background-color: var(--main-color); /* Blue color */
|
||||
color: #fff; /* White text color */
|
||||
padding: 10px 15px;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s ease;
|
||||
}
|
||||
|
||||
button:hover {
|
||||
background-color: var(--secondary-color); /* Lighter shade of purple on hover */
|
||||
color: var(--main-color);
|
||||
border: 2px;
|
||||
border-color: var(--main-color);
|
||||
}
|
||||
</style>
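As a possible follow-up (not part of this change): the export const backendURL = import.meta.env.VITE_BACKEND_URL pattern is repeated in Results.vue, WR.vue and WR_AllCriteria.vue, so it could be centralized in one shared module. The file name src/api.js and the helper below are hypothetical.

    // src/api.js (hypothetical): single place to read the backend URL from the environment.
    export const apiURL = import.meta.env.VITE_BACKEND_URL;

    export async function postJSON(path, payload) {
      const response = await fetch(`${apiURL}${path}`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(payload),
      });
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }
      return response.json();
    }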
|
@ -1,7 +1,6 @@
|
||||
import { createRouter, createWebHistory } from 'vue-router';
|
||||
import DataGrid from '@/components/DataGrid.vue';
|
||||
import CriteriaSelection from '@/components/CriteriaSelection.vue'; // Import the new component
|
||||
//import SummedData from '@/components/SummedData.vue';
|
||||
import Evaluation from '@/components/Evaluation.vue'; // Import the Evaluation component
|
||||
import WR from '@/components/WR.vue';
|
||||
import Results from '@/components/Results.vue'; // Import the Results component
|
||||
|
@ -1,4 +1,3 @@
|
||||
// router.js
|
||||
import { createRouter, createWebHistory } from 'vue-router';
|
||||
import App from './App.vue';
|
||||
import HierarchicalCategoryList from '@/components/HierarchicalCategoryList.vue';
|
||||
|
@ -0,0 +1,23 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: db-init-script
data:
  db_script.sql: |
    CREATE TABLE IF NOT EXISTS users (
        id SERIAL PRIMARY KEY,
        uuid VARCHAR(255) NOT NULL,
        username VARCHAR(255) NOT NULL,
        password VARCHAR(255) NOT NULL
    );

    CREATE TABLE IF NOT EXISTS apps (
        id SERIAL PRIMARY KEY,
        user_uuid VARCHAR(255) NOT NULL,
        title VARCHAR(255) NOT NULL,
        description TEXT,
        app_id VARCHAR(255) NOT NULL
    );

    INSERT INTO users (username, password, uuid) VALUES ('greg', '12345', 'e3ff4006-be5f-4e00-bbe1-e49a88b2541a');
    INSERT INTO apps (user_uuid, title, description, app_id) VALUES ('e3ff4006-be5f-4e00-bbe1-e49a88b2541a', 'Demo App', 'Demo App description', '2f7cc63df4b1da7532756f44345758da');
@ -0,0 +1,61 @@
|
||||
{{ if .Values.postgresql.enabled }}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "nebulous-cloud-fog-service-broker.fullname" . }}-postgresql
|
||||
labels:
|
||||
{{- include "nebulous-cloud-fog-service-broker.labels" . | nindent 4 }}
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "nebulous-cloud-fog-service-broker.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
{{- include "nebulous-cloud-fog-service-broker.selectorLabels" . | nindent 8 }}
|
||||
spec:
|
||||
volumes:
|
||||
- name: postgres-data
|
||||
persistentVolumeClaim:
|
||||
claimName: {{ .Values.postgresql.volumeMounts.data.claimName }}
|
||||
- name: init-script
|
||||
configMap:
|
||||
name: {{ .Values.postgresql.volumeMounts.initScript.configMapName }}
|
||||
containers:
|
||||
- name: postgresql
|
||||
image: "{{ .Values.postgresql.image }}"
|
||||
ports:
|
||||
- name: postgresql
|
||||
containerPort: {{ .Values.postgresql.port }}
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: POSTGRES_USER
|
||||
value: "{{ .Values.postgresql.user }}"
|
||||
- name: POSTGRES_PASSWORD
|
||||
value: "{{ .Values.postgresql.password }}"
|
||||
- name: POSTGRES_DB
|
||||
value: "{{ .Values.postgresql.dbName }}"
|
||||
volumeMounts:
|
||||
- name: postgres-data
|
||||
mountPath: /var/lib/postgresql/data/
|
||||
- name: init-script
|
||||
mountPath: /docker-entrypoint-initdb.d
|
||||
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ include "nebulous-cloud-fog-service-broker.fullname" . }}-postgresql
|
||||
labels:
|
||||
{{- include "nebulous-cloud-fog-service-broker.labels" . | nindent 4 }}
|
||||
spec:
|
||||
type: ClusterIP
|
||||
ports:
|
||||
- port: {{ .Values.postgresql.port }}
|
||||
targetPort: postgresql
|
||||
protocol: TCP
|
||||
name: postgresql
|
||||
selector:
|
||||
{{- include "nebulous-cloud-fog-service-broker.selectorLabels" . | nindent 4 }}
|
||||
{{ end }}
|
@ -35,18 +35,29 @@ spec:
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          ports:
            - name: http
              containerPort: 8080
              containerPort: 8001
              protocol: TCP
          livenessProbe:
            httpGet:
              path: /
              port: http
          readinessProbe:
            httpGet:
              path: /
              port: http
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
          env:
            - name: NEBULOUS_BROKER_URL
              value: "{{ .Values.customEnv.NEBULOUS_BROKER_URL }}"
            - name: NEBULOUS_BROKER_PORT
              value: "{{ .Values.customEnv.NEBULOUS_BROKER_PORT }}"
            - name: NEBULOUS_BROKER_USERNAME
              value: "{{ .Values.customEnv.NEBULOUS_BROKER_USERNAME }}"
            - name: NEBULOUS_BROKER_PASSWORD
              value: "{{ .Values.customEnv.NEBULOUS_BROKER_PASSWORD }}"
            - name: POSTGRES_DB_HOST
              value: "{{ .Values.customEnv.POSTGRES_DB_HOST }}"
            - name: POSTGRES_DB_NAME
              value: "{{ .Values.customEnv.POSTGRES_DB_NAME }}"
            - name: POSTGRES_DB_PORT
              value: "{{ .Values.customEnv.POSTGRES_DB_PORT }}"
            - name: POSTGRES_DB_USER
              value: "{{ .Values.customEnv.POSTGRES_DB_USER }}"
            - name: POSTGRES_DB_PASS
              value: "{{ .Values.customEnv.POSTGRES_DB_PASS }}"
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
@ -0,0 +1,10 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: postgresql-pvc
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 1Gi
@ -5,7 +5,7 @@
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
repository: "quay.io/nebulous/cloud-fog-service-broker-java-spring-boot-demo"
|
||||
repository: "quay.io/nebulous/cloud-fog-service-broker-backend"
|
||||
pullPolicy: IfNotPresent
|
||||
# Overrides the image tag whose default is the chart appVersion.
|
||||
tag: ""
|
||||
@ -80,3 +80,27 @@ nodeSelector: {}
tolerations: []

affinity: {}

customEnv:
  NEBULOUS_BROKER_URL: "nebulous-activemq"
  NEBULOUS_BROKER_PORT: "61616"
  NEBULOUS_BROKER_USERNAME: "admin"
  NEBULOUS_BROKER_PASSWORD: "admin"
  POSTGRES_DB_HOST: "localhost"
  POSTGRES_DB_NAME: "fog_broker"
  POSTGRES_DB_PORT: "5432"
  POSTGRES_DB_USER: "dbuser"
  POSTGRES_DB_PASS: "pass123"

postgresql:
  enabled: true
  image: "docker.io/postgres:16"
  user: "dbuser"
  password: "pass123"
  dbName: "fog_broker"
  port: 5432
  volumeMounts:
    data:
      claimName: "postgresql-pvc"
    initScript:
      configMapName: "db-init-script"
docker-compose.yml (new file, 41 lines)
@ -0,0 +1,41 @@
version: '3.0'
services:
  backend:
    build:
      context: ./cfsb-backend
      dockerfile: Dockerfile
    ports:
      - "8001:8001"
    env_file:
      - ./cfsb-backend/.env.prod
    depends_on:
      - db
    networks:
      cfsb-network:
  db:
    image: postgres:16
    ports:
      - "5432:5432"
    environment:
      - POSTGRES_USER=dbuser
      - POSTGRES_PASSWORD=pass123
      - POSTGRES_DB=fog_broker
    volumes:
      - postgres_data:/var/lib/postgresql/data/
      - ./cfsb-backend/db/db_script.sql:/docker-entrypoint-initdb.d/db_script.sql
    networks:
      cfsb-network:
  frontend:
    build:
      context: ./cfsb-frontend
      dockerfile: Dockerfile
    ports:
      - "8080:80"
    networks:
      cfsb-network:

networks:
  cfsb-network:

volumes:
  postgres_data:
java-spring-boot-demo/.gitignore (vendored, 33 lines)
@ -1,33 +0,0 @@
|
||||
HELP.md
|
||||
target/
|
||||
!.mvn/wrapper/maven-wrapper.jar
|
||||
!**/src/main/**/target/
|
||||
!**/src/test/**/target/
|
||||
|
||||
### STS ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
.sts4-cache
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
|
||||
### NetBeans ###
|
||||
/nbproject/private/
|
||||
/nbbuild/
|
||||
/dist/
|
||||
/nbdist/
|
||||
/.nb-gradle/
|
||||
build/
|
||||
!**/src/main/**/build/
|
||||
!**/src/test/**/build/
|
||||
|
||||
### VS Code ###
|
||||
.vscode/
|
@ -1,15 +0,0 @@
|
||||
#
|
||||
# Build stage
|
||||
#
|
||||
FROM docker.io/library/maven:3.9.2-eclipse-temurin-17 AS build
|
||||
COPY src /home/app/src
|
||||
COPY pom.xml /home/app
|
||||
RUN mvn -f /home/app/pom.xml clean package
|
||||
|
||||
#
|
||||
# Package stage
|
||||
#
|
||||
FROM docker.io/library/eclipse-temurin:17-jre
|
||||
COPY --from=build /home/app/target/demo-0.0.1-SNAPSHOT.jar /usr/local/lib/demo.jar
|
||||
EXPOSE 8080
|
||||
ENTRYPOINT ["java","-jar","/usr/local/lib/demo.jar"]
|
@ -1,42 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-parent</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<relativePath/> <!-- lookup parent from repository -->
|
||||
</parent>
|
||||
<groupId>com.example</groupId>
|
||||
<artifactId>demo</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<name>demo</name>
|
||||
<description>Demo project for Spring Boot</description>
|
||||
<properties>
|
||||
<java.version>17</java.version>
|
||||
</properties>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-web</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
@ -1,13 +0,0 @@
|
||||
package com.example.demo;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
|
||||
@SpringBootApplication
|
||||
public class DemoApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(DemoApplication.class, args);
|
||||
}
|
||||
|
||||
}
|
@ -1,14 +0,0 @@
|
||||
package com.example.demo;
|
||||
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
@RestController
|
||||
public class DemoController {
|
||||
|
||||
@RequestMapping("/")
|
||||
public Object root() {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
package com.example.demo;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
|
||||
@SpringBootTest
|
||||
class DemoApplicationTests {
|
||||
|
||||
@Test
|
||||
void contextLoads() {
|
||||
}
|
||||
|
||||
}
|
@ -6,17 +6,24 @@
|
||||
soft: false
|
||||
provides:
|
||||
- nebulous-cloud-fog-service-broker-container-images
|
||||
description: Build the container images.
|
||||
description: Build the container images for both the backend and frontend.
|
||||
files: &image_files
|
||||
- ^java-spring-boot-demo/
|
||||
- ^cfsb-backend/
|
||||
- ^cfsb-frontend/
|
||||
vars: &image_vars
|
||||
promote_container_image_job: nebulous-cloud-fog-service-broker-upload-container-images
|
||||
container_images:
|
||||
- context: java-spring-boot-demo
|
||||
- context: cfsb-backend
|
||||
registry: quay.io
|
||||
repository: quay.io/nebulous/cloud-fog-service-broker-java-spring-boot-demo
|
||||
repository: quay.io/nebulous/cloud-fog-service-broker-backend
|
||||
namespace: nebulous
|
||||
repo_shortname: cloud-fog-service-broker-java-spring-boot-demo
|
||||
repo_shortname: cloud-fog-service-broker
|
||||
repo_description: ""
|
||||
- context: cfsb-frontend
|
||||
registry: quay.io
|
||||
repository: quay.io/nebulous/cloud-fog-service-broker-frontend
|
||||
namespace: nebulous
|
||||
repo_shortname: cloud-fog-service-broker
|
||||
repo_description: ""
|
||||
|
||||
- job:
|
||||
@ -27,14 +34,14 @@
|
||||
soft: false
|
||||
provides:
|
||||
- nebulous-cloud-fog-service-broker-container-images
|
||||
description: Build and upload the container images.
|
||||
description: Build and upload both the backend and frontend container images.
|
||||
files: *image_files
|
||||
vars: *image_vars
|
||||
|
||||
- job:
|
||||
name: nebulous-cloud-fog-service-broker-promote-container-images
|
||||
parent: nebulous-promote-container-images
|
||||
description: Promote previously uploaded container images.
|
||||
description: Promote previously uploaded backend and frontend container images.
|
||||
files: *image_files
|
||||
vars: *image_vars
|
||||
|
||||
@ -44,7 +51,8 @@
|
||||
description: Run Hadolint on Dockerfile(s).
|
||||
vars:
|
||||
dockerfiles:
|
||||
- java-spring-boot-demo/Dockerfile
|
||||
- cfsb-backend/Dockerfile
|
||||
- cfsb-frontend/Dockerfile
|
||||
|
||||
- job:
|
||||
name: nebulous-cloud-fog-service-broker-helm-lint
|
||||
|