mirror of
https://github.com/newnius/YAO-agent.git
synced 2025-06-06 05:21:55 +00:00
update
This commit is contained in:
parent
4d9edef1fc
commit
71bd27f530
7
.gitignore
vendored
7
.gitignore
vendored
@ -4,12 +4,15 @@ status.xml
|
||||
|
||||
|
||||
|
||||
# IDEA
|
||||
|
||||
# IDEA IntelliJ
|
||||
*.iml
|
||||
.idea/
|
||||
|
||||
|
||||
# Tmp files
|
||||
*.swp
|
||||
*~
|
||||
|
||||
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
|
17
Dockerfile
Normal file
17
Dockerfile
Normal file
@ -0,0 +1,17 @@
|
||||
FROM nvidia/cuda:9.0-base

# MAINTAINER is deprecated since Docker 1.13; LABEL is the supported form
# and carries the same metadata.
LABEL maintainer="Newnius <newnius.cn@gmail.com>"

# Python 3 runtime plus pip for the agent's dependencies.
RUN apt update && \
	apt install -y python3 python3-pip

# docker SDK to drive the local daemon, kafka client to report node status.
RUN pip3 install docker kafka

ADD bootstrap.sh /etc/bootstrap.sh

ADD yao-agent.py /root/yao-agent.py
ADD server.py /root/server.py

WORKDIR /root

CMD ["/etc/bootstrap.sh"]
|
6
bootstrap.sh
Executable file
6
bootstrap.sh
Executable file
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bash

# Start the GPU-metrics reporting agent in the background.
python3 /root/yao-agent.py &

# exec replaces this shell so the HTTP server becomes PID 1 and receives
# container stop signals (SIGTERM) directly instead of via a shell that
# does not forward them.
exec python3 /root/server.py
|
22
executor.py
Normal file
22
executor.py
Normal file
@ -0,0 +1,22 @@
|
||||
import docker
|
||||
|
||||
|
||||
def run():
    """Run ``nvidia-smi`` inside a CUDA base image and print its output."""
    docker_client = docker.from_env()
    output = docker_client.containers.run(
        image="nvidia/cuda:9.0-base",
        command="nvidia-smi",
        environment={"KEY": "value"},
        runtime="nvidia",
    )
    print(output)
|
||||
|
||||
|
||||
def run_in_background():
    """Start a short-lived alpine container detached and print its id."""
    docker_client = docker.from_env()
    started = docker_client.containers.run(
        "alpine", ["echo", "hello", "world"], detach=True
    )
    print(started.id)
|
||||
|
||||
|
||||
def list_containers():
    """Print the id of every currently running container."""
    docker_client = docker.from_env()
    for running in docker_client.containers.list():
        print(running.id)
|
||||
|
||||
|
||||
# Demo entry point: executing this module runs the GPU container once.
run()
|
108
server.py
Normal file
108
server.py
Normal file
@ -0,0 +1,108 @@
|
||||
#!/usr/bin/python
|
||||
from http.server import BaseHTTPRequestHandler, HTTPServer
|
||||
import cgi
|
||||
import docker
|
||||
import json
|
||||
from urllib import parse
|
||||
|
||||
PORT_NUMBER = 8000
|
||||
|
||||
|
||||
# This class will handles any incoming request from
|
||||
# the browser
|
||||
class MyHandler(BaseHTTPRequestHandler):
    """HTTP handler exposing a minimal Docker control API for the agent.

    Routes:
        GET  /ping          -> liveness probe, responds with "pong"
        GET  /logs?id=<id>  -> {"code": 0, "logs": <container logs>}
        POST /create        -> start a container (form fields: image, cmd)
        POST /stop          -> stop a container (form field: id)
    """

    def _send_json_bytes(self, payload):
        # Send a 200 response with an application/json header and raw bytes.
        self.send_response(200)
        self.send_header('Content-type', 'application/json')
        self.end_headers()
        self.wfile.write(payload)

    def _send_json(self, msg):
        # Serialize *msg* to JSON and send it as a 200 response.
        self._send_json_bytes(bytes(json.dumps(msg), "utf-8"))

    def _parse_form(self):
        # Parse the POST body as form data (shared by /create and /stop).
        # NOTE(review): the cgi module is deprecated and removed in Python
        # 3.13; consider urllib.parse.parse_qs on the raw body when upgrading.
        return cgi.FieldStorage(
            fp=self.rfile,
            headers=self.headers,
            environ={
                'REQUEST_METHOD': 'POST',
                'CONTENT_TYPE': self.headers['Content-Type'],
            })

    # Handler for the GET requests
    def do_GET(self):
        req = parse.urlparse(self.path)
        query = parse.parse_qs(req.query)

        if req.path == "/ping":
            # Liveness probe; the body is plain text even though the header
            # claims JSON — kept as-is for compatibility with existing callers.
            self._send_json_bytes(bytes("pong", "utf-8"))

        elif req.path == "/logs":
            # 'id' renamed to container_id: the original shadowed the builtin.
            container_id = query['id'][0]
            client = docker.from_env()
            container = client.containers.get(container_id)

            self._send_json({'code': 0, 'logs': container.logs().decode()})

        else:
            self.send_error(404, 'File Not Found: %s' % self.path)

    # Handler for the POST requests
    def do_POST(self):
        if self.path == "/create":
            form = self._parse_form()
            docker_image = form["image"].value
            docker_cmd = form["cmd"].value

            print(docker_image)
            print(docker_cmd)

            client = docker.from_env()
            container = client.containers.run(
                image=docker_image,
                command=docker_cmd,
                environment={"key": "value"},
                runtime="nvidia",
                detach=True
            )

            self._send_json({"code": 0, "id": container.id})

        elif self.path == "/stop":
            form = self._parse_form()
            container_id = form["id"].value

            client = docker.from_env()
            client.containers.get(container_id).stop()
            self._send_json({"code": 0})

        else:
            self.send_error(404, 'File Not Found: %s' % self.path)
|
||||
|
||||
|
||||
try:
    # Create a web server and hand every incoming request to MyHandler.
    server = HTTPServer(('', PORT_NUMBER), MyHandler)
    print('Started httpserver on port ', PORT_NUMBER)

    # Block forever serving HTTP requests.
    server.serve_forever()

except KeyboardInterrupt:
    print('^C received, shutting down the web server')

finally:
    # The original closed the socket unconditionally after the try, which
    # raises NameError if HTTPServer() itself failed (e.g. port in use);
    # guard the cleanup so shutdown is always safe.
    if 'server' in globals():
        server.socket.close()
|
13
yao-agent.py
13
yao-agent.py
@ -6,7 +6,6 @@ from xml.dom.minidom import parse
|
||||
import xml.dom.minidom
|
||||
from kafka import KafkaProducer
|
||||
|
||||
|
||||
ClientID = os.getenv('ClientID', 1)
|
||||
KafkaBrokers = os.getenv('KafkaBrokers', 'localhost:9092').split(',')
|
||||
|
||||
@ -18,7 +17,7 @@ def main():
|
||||
status, msg_gpu = execute(['nvidia-smi', '-q', '-x', '-f', 'status.xml'])
|
||||
if not status:
|
||||
print("execute failed, ", msg_gpu)
|
||||
continue
|
||||
continue
|
||||
report_msg()
|
||||
time.sleep(interval)
|
||||
except Exception as e:
|
||||
@ -55,6 +54,16 @@ def report_msg():
|
||||
'temperature_gpu': gpu.getElementsByTagName('temperature')[0].getElementsByTagName('gpu_temp')[0].childNodes[0].data,
|
||||
'power_draw': gpu.getElementsByTagName('power_readings')[0].getElementsByTagName('power_draw')[0].childNodes[0].data
|
||||
}
|
||||
|
||||
stat['fan_speed'] = int(float(stat['fan_speed'].split(' ')[0]))
|
||||
stat['memory_total'] = int(float(stat['memory_total'].split(' ')[0]))
|
||||
stat['memory_free'] = int(float(stat['memory_free'].split(' ')[0]))
|
||||
stat['memory_used'] = int(float(stat['memory_used'].split(' ')[0]))
|
||||
stat['utilization_gpu'] = int(float(stat['utilization_gpu'].split(' ')[0]))
|
||||
stat['utilization_mem'] = int(float(stat['utilization_mem'].split(' ')[0]))
|
||||
stat['temperature_gpu'] = int(float(stat['temperature_gpu'].split(' ')[0]))
|
||||
stat['power_draw'] = int(float(stat['power_draw'].split(' ')[0]))
|
||||
|
||||
stats.append(stat)
|
||||
|
||||
post_fields = {'id': ClientID, 'status': stats}
|
||||
|
Loading…
Reference in New Issue
Block a user