import gym, random, copy, xlwt
from gym import spaces
from gym.utils import seeding
import numpy as np
import networkx as nx
from . import utils
class clusterEnv(gym.Env):
metadata = {"render.modes": ["human", "rgb_array"], "video.frames_per_second": 50}
def __init__(self):
    """Initialize the cluster scheduling environment.

    Sets up the static generation parameters, the observable state fields,
    the transition bookkeeping, and loads the training DAG dataset.
    """
    self.M = 30  # length of the action list, not counting the padding action -1
    self.t_unit = 10  # time unit for DAG task durations: 80% uniform in [t, 3t], 20% uniform in [10t, 15t]
    self.cpu_res_unit = 100  # CPU unit: 50% of tasks are CPU-intensive (0.25r-0.5r), the rest use 0.05r-0.01r
    self.memory_res_unit = 100  # memory unit: 50% of tasks are memory-intensive (0.25r-0.5r), the rest use 0.05r-0.01r
    ## observable state
    # NOTE(review): the original also pre-assigned time/cpu_res/memory_res in a
    # tuple assignment immediately before these three lines; the duplicate was removed.
    self.time = 0  # global environment clock: time spent executing the DAG so far
    self.cpu_res = 100  # current CPU capacity of the compute resource
    self.memory_res = 100  # current memory capacity of the compute resource
    self.b_level = None  # b-level value
    self.children_num = None  # number of child nodes
    self.edges = []  # edge info of the randomly generated DAG
    self.available_list = [-1] * self.M  # scheduling input slots; padded with -1 up to self.M
    self.duration = []  # duration info of the randomly generated DAG
    self.demand = []  # total resource demand of the backlot
    self.wait_duration = [-1] * self.M  # generated DAG duration info (padded)
    self.cpu_demand = [-1] * self.M  # generated DAG CPU demand info (padded)
    self.memory_demand = [-1] * self.M  # generated DAG memory demand info (padded)
    self.ready_list = []  # tasks whose DAG dependencies are satisfied (may still be blocked by resources)
    self.done_job = []  # IDs of completed tasks
    self.position = []  # plotting coordinates of the current DAG
    self.backlot_time = 0  # total time occupancy of the backlot
    self.backlot_cpu_res = 0  # total CPU occupancy of the backlot
    self.backlot_memory_res = 0  # total memory occupancy of the backlot
    ## transition info and intermediate variables
    self.tasks = []  # tasks currently pending on the compute resource
    self.tasks_remaing_time = {}  # remaining execution time of the pending tasks
    self.seed1 = 0
    self.viewer = None
    self.state = None
    self.edges_lib = []
    self.duration_lib = []
    self.demand_lib = []
    self.DAGsize = 30
    self.load_train_dataset(self.DAGsize)
    # self.load_test_dataset(self.DAGsize)  # switch to this line for evaluation runs
# --- Simple mutators used to overwrite the environment's bookkeeping fields. ---
# Container-valued fields are deep-copied so that later mutation of the caller's
# object cannot silently change the environment's internal state; scalar fields
# (cpu_res, memory_res, time) are assigned directly.
def set_state(self, state):
self.state = copy.deepcopy(state)  # observation vector (deep copy)
def set_ready_list(self, ready_list):
self.ready_list = copy.deepcopy(ready_list)  # dependency-satisfied task ids (deep copy)
def set_done_job(self, done_job):
self.done_job = copy.deepcopy(done_job)  # completed task ids (deep copy)
def set_tasks(self, tasks):
self.tasks = copy.deepcopy(tasks)  # tasks pending on the resource (deep copy)
def set_wait_duration(self, wait_duration):
self.wait_duration = copy.deepcopy(wait_duration)  # padded duration list (deep copy)
def set_cpu_demand(self, cpu_demand):
self.cpu_demand = copy.deepcopy(cpu_demand)  # padded CPU demand list (deep copy)
def set_memory_demand(self, memory_demand):
self.memory_demand = copy.deepcopy(memory_demand)  # padded memory demand list (deep copy)
def set_tasks_remaing_time(self, tasks_remaing_time):
self.tasks_remaing_time = copy.deepcopy(tasks_remaing_time)  # task id -> remaining time (deep copy)
def set_cpu_res(self, cpu_res):
self.cpu_res = cpu_res  # scalar: current CPU capacity
def set_memory_res(self, memory_res):
self.memory_res = memory_res  # scalar: current memory capacity
def set_time(self, time):
self.time = time  # scalar: environment clock
def load_train_dataset(self, DAGsize,
                       data_root='/Users/livion/Documents/GitHub/Cloud-Workflow-Scheduling-base-on-Deep-Reinforcement-Learning/npy/train_datasheet/'):
    """Load the pre-generated training DAG library into the env.

    Populates ``edges_lib``, ``duration_lib`` and ``demand_lib`` from the
    ``.npy`` files under ``<data_root>/<DAGsize>/``.

    :param DAGsize: number of nodes per DAG; selects the dataset subdirectory
        and is part of each file name (e.g. ``edges30_lib.npy``).
    :param data_root: base directory of the training dataset. Defaults to the
        original hard-coded location so existing callers are unaffected;
        pass a different path to relocate the dataset.
    """
    print('train datasheet lib.')
    base = f'{data_root}{DAGsize}/'
    # allow_pickle is required: the arrays store Python object lists.
    self.edges_lib = np.load(f'{base}edges{DAGsize}_lib.npy', allow_pickle=True).tolist()
    self.duration_lib = np.load(f'{base}duration{DAGsize}_lib.npy', allow_pickle=True).tolist()
    self.demand_lib = np.load(f'{base}demand{DAGsize}_lib.npy', allow_pickle=True).tolist()
    print('load completed.')
    return
def load_test_dataset(self, DAGsize,
                      data_root='/Users/livion/Documents/GitHub/Cloud-Workflow-Scheduling-base-on-Deep-Reinforcement-Learning/npy/test_datasheet/'):
    """Load the pre-generated test DAG library into the env.

    Populates ``edges_lib``, ``duration_lib`` and ``demand_lib`` from the
    ``.npy`` files under ``<data_root>/<DAGsize>/``.

    :param DAGsize: number of nodes per DAG; selects the dataset subdirectory
        and is part of each file name (e.g. ``edges30_lib.npy``).
    :param data_root: base directory of the test dataset. Defaults to the
        original hard-coded location so existing callers are unaffected;
        pass a different path to relocate the dataset.
    """
    print('test datasheet loaded.')
    base = f'{data_root}{DAGsize}/'
    # allow_pickle is required: the arrays store Python object lists.
    self.edges_lib = np.load(f'{base}edges{DAGsize}_lib.npy', allow_pickle=True).tolist()
    self.duration_lib = np.load(f'{base}duration{DAGsize}_lib.npy', allow_pickle=True).tolist()
    self.demand_lib = np.load(f'{base}demand{DAGsize}_lib.npy', allow_pickle=True).tolist()
    print('load completed.')
    return
def return_dim_info(self):
    """Return the observation dimensionality and the action-space size.

    The observation length is derived from the action-list length ``M``
    (3 + 3*M + 5); the action space has one entry per slot plus one extra.
    """
    state_dim = 3 + self.M * 3 + 5
    action_dim = self.M + 1
    return state_dim, action_dim
def _search_for_predecessor(self, node, edges):
'''
寻找前继节点
:param node: 需要查找的节点id
:param edges: DAG边信息
:return: node的前继节点id列表
'''
map = {}
if node == 'Start': return print("error, 'Start' node do not have predecessor!")
for i in range(len(edges)):
if edges[i][1] in map.keys():
map[edges[i][1]].append(edges[i][0])
else:
map[edges[i][1]] = [edges[i][0]]
succ = map[node]
return succ
def _search_for_successors(self, node, edges):
'''
寻找后续节点
:param node: 需要查找的节点id
:param edges: DAG边信息(注意最好传列表的值(edges[:])进去而不是传列表的地址(edges)!!!)
:return: node的后续节点id列表
'''
map = {}
if node == 'Exit': return print("error, 'Exit' node do not have successors!")
for i in range(len(edges)):
if edges[i][0] in map.keys():
map[edges[i][0]].append(edges[i][1])
else:
map[edges[i][0]] = [edges[i][1]]
pred = map[node]
return pred
def _update_ready_list(self, ready_list, done_job, edges):
'''
根据已完成的任务更新当前可以执行的task列表,满足DAG的依赖关系。并不表明它可以被执行,因为还要受资源使用情况限制
:param ready_l
没有合适的资源?快使用搜索试试~ 我知道了~
资源推荐
资源详情
资源评论
收起资源包目录
基于深度强化学习的云工作流调度.zip (132个子文件)
events.out.tfevents.1648025256.bogon.20133.0 835KB
events.out.tfevents.1650357478.bogon.12292.0 557KB
events.out.tfevents.1650505386.bogon.2078.0 556KB
events.out.tfevents.1650364222.bogon.15910.0 556KB
events.out.tfevents.1648035224.bogon.23193.0 556KB
events.out.tfevents.1648038395.bogon.25510.0 417KB
events.out.tfevents.1648041268.bogon.26891.0 334KB
.DS_Store 14KB
.DS_Store 8KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
.DS_Store 6KB
launch.json 481B
extensions.json 59B
edges50_lib.npy 1.01MB
edges50_lib.npy 1023KB
edges40_lib.npy 818KB
edges40_lib.npy 814KB
edges30_lib.npy 607KB
edges30_lib.npy 604KB
edges20_lib.npy 398KB
edges20_lib.npy 394KB
demand50_lib.npy 391KB
demand50_lib.npy 391KB
demand40_lib.npy 313KB
demand40_lib.npy 313KB
demand30_lib.npy 235KB
demand30_lib.npy 235KB
duration50_lib.npy 195KB
duration50_lib.npy 195KB
edges10_lib.npy 193KB
edges10_lib.npy 193KB
duration40_lib.npy 156KB
demand20_lib.npy 156KB
duration40_lib.npy 156KB
demand20_lib.npy 156KB
duration30_lib.npy 117KB
duration30_lib.npy 117KB
demand10_lib.npy 78KB
duration20_lib.npy 78KB
demand10_lib.npy 78KB
duration20_lib.npy 78KB
edgestest_lib.npy 63KB
duration10_lib.npy 39KB
duration10_lib.npy 39KB
demandtest_lib.npy 24KB
durationtest_lib.npy 12KB
actor.pkl 29KB
critic.pkl 25KB
消融实验.png 933KB
resource.png 458KB
random.png 412KB
makespan50.png 334KB
makespan40.png 333KB
makespan20.png 329KB
makespan30.png 322KB
makespan10.png 314KB
10DAG.png 214KB
image-20220228152039290.png 192KB
image-20220228153127773.png 189KB
p11.png 127KB
PPO.png 81KB
image-20220228114904174.png 68KB
spear.png 51KB
PPO_graphEnv-v0_30GCN_256.pth 678KB
PPO_graphEnv-v0_30GCN_128.pth 213KB
PPO_MyEnv-v0_30.pth 94KB
PPO_clusterEnv-v0_30MCTS.pth 94KB
PPO_clusterEnv-v0_40MCTS.pth 94KB
PPO_clusterEnv-v0_50MCTS.pth 94KB
PPO_clusterEnv-v0_20MCTS.pth 94KB
PPO_clusterEnv-v0_10MCTS.pth 94KB
PPO_clusterEnv-v0_resourceTest.pth 94KB
PPO_graphEnv-v0_30GCN.pth 76KB
NonLinearNw2_3-3.pth 2KB
NonLinearNw3_3-3.pth 2KB
NonLinearNw1.pth 2KB
GCN_0.pth 2KB
GCN_1.pth 2KB
NonLinearNw3.pth 2KB
NonLinearNw2.pth 2KB
clusterEnv.py 20KB
graphEnv.py 19KB
myEnv.py 18KB
testEnv.py 18KB
MonteCarloTreeSearch.py 14KB
resources_monitor MCTS.py 14KB
PPOagentForGCN.py 14KB
PPOagent.py 14KB
PPOtest.py 12KB
PPOtestForGCN.py 12KB
共 132 条
- 1
- 2
资源评论
博士僧小星
- 粉丝: 1935
- 资源: 5894
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功