files from/for giulia

master
Michael Murtaugh 7 years ago
parent ae7bae033e
commit b978db23c9

@@ -0,0 +1 @@
FLOPPYLEFT - 2017

@@ -0,0 +1,7 @@
Author: Slavoj Žižek
Date: 1989
Title: The Sublime Object of Floppy
Description:
And so on, and so on, and so on.

@@ -0,0 +1,50 @@
#!/usr/bin/env python
import cgi, jinja2, os, json, re
import cgitb; cgitb.enable()
from jinja2 import Template
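# Index CGI: shows each recorded clip (newest first) as a jpg thumbnail
# linking to its mp4, plus a button that calls record.cgi to capture a new one.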
# Directory => ITEMS list (all files with a timestamp name, grouped)
ff = os.listdir(".")
tpat = re.compile(r"^(\d\d\d\d)(\d\d)(\d\d)T(\d\d)(\d\d)(\d\d)Z")
items = {}
for f in ff:
    base, ext = os.path.splitext(f)
    ext = ext[1:]
    m = tpat.match(f)
    if m:
        t = m.group(0)
        if t not in items:
            items[t] = {}
        items[t][ext] = f
items = [items[key] for key in sorted(items, reverse=True)]
# # dump the data (debugging)
# print "Content-type: text/plain"
# print ""
# print json.dumps(items, indent=2)
# Output template with items
print "Content-type: text/html"
print ""
print Template(u"""<html>
<head>
<title>RECORD O RAMA</title>
</head>
<body>
<form method="get" action="record.cgi">
<input type="submit" value="record" />
</form>
{% for i in items %}
<a href="../{{i.mp4}}"><img src="../{{i.jpg}}" /></a>
{% endfor %}
<div>
<form method="get" action="record.cgi">
<input type="submit" value="record" />
</form>
</div>
</body>
</html>""").render(items=items).encode("utf-8")

@@ -0,0 +1,23 @@
#!/usr/bin/env python
import cgi, sys, datetime
import cgitb; cgitb.enable()
import subprocess
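# Record CGI: capture a 5 second clip with scripts/simplerecord.py,
# transcode the avi to mp4 with ffmpeg, grab a single frame (at 1 second)
# as a jpg thumbnail, then link back to the index page.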
print "Content-type: text/html"
print
print "hello from python<br>"
n = datetime.datetime.now()
basename = n.strftime("%Y%m%dT%H%M%SZ")
o1 = subprocess.check_output(["scripts/simplerecord.py", "--output", basename+".avi", "--time", "5"])
print o1 + "<br>\n"
o2 = subprocess.check_output(["ffmpeg", "-i", basename+".avi", "-y", basename+".mp4"])
print o2 + "<br>\n"
o2 = subprocess.check_output(["ffmpeg", "-i", basename+".avi", "-ss", "1", "-vframes", "1", "-y", basename+".jpg"])
print o2 + "<br>\n"
# print """<a href=\"../{0}.mp4\">VIDEO</a>""".format(basename)
print """<a href="index.cgi">OK</a>"""

@@ -0,0 +1,11 @@
Gait analysis number one.
Please state your name:
Position yourself 2 to 3 meters away from the Tetra Gamma Circulaire.
Walk towards the Tetra Gamma Circulaire in a straight line.
Position yourself one meter to the left of the Tetra Gamma Circulaire.
Walk from left to right in front of the Tetra Gamma Circulaire.
Turn your back to the Tetra Gamma Circulaire.
Walk away from the Tetra Gamma Circulaire.
Position yourself 2 to 3 meters away from the Tetra Gamma Circulaire.
Walk towards the Tetra Gamma Circulaire on a zigzag line.

@@ -0,0 +1,12 @@
#N canvas 296 315 450 300 10;
#X obj 37 104 osc~ 440;
#X obj 37 146 dac~;
#X obj 161 74 loadbang;
#X msg 161 111 \; pd dsp 1;
#X obj 37 36 netreceive 3000;
#X obj 46 62 print;
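#X text 37 180 numbers received on port 3000 set the frequency of osc~ / loadbang switches DSP on;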
#X connect 0 0 1 0;
#X connect 0 0 1 1;
#X connect 2 0 3 0;
#X connect 4 0 5 0;
#X connect 4 0 0 0;

@@ -0,0 +1,12 @@
#!/usr/bin/env python
import os, random, time
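# Send a random multiple of 110 Hz (0-1100) to the Pd patch listening on
# port 3000 every quarter second; requires the pdsend utility that ships
# with Pure Data.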
while True:
    freq = str(random.randint(0,10)*110)
    print(freq)
    os.system('echo "'+freq+';" | pdsend 3000')
    time.sleep(0.25)

@@ -0,0 +1,17 @@
#! /usr/bin/env python
import subprocess
from time import sleep
# requires: espeak to be installed
waittimes = [1,2,1,4,1,4,1,4,1,4]
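# one wait time (in seconds) per line of instructions.txt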
f = open("instructions.txt", "r")
txt = f.readlines()
for i, line in enumerate(txt):
    waittime = waittimes[i]
    print i, waittime #, line,
    subprocess.call(["espeak", line, "-v", "en"]) # character speaks: his/her line
    sleep(waittime) # make pause after each text line

@@ -0,0 +1,64 @@
#!/usr/bin/env python
import numpy as np
import cv2
import video
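# Live dense optical flow (Farneback) from the webcam, rendered with draw_hsv();
# `video` is presumably the capture helper from the OpenCV Python samples
# (it provides create_capture).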
def draw_flow(img, flow, step=16):
    h, w = img.shape[:2]
    # sample the flow field on a regular grid...
    y, x = np.mgrid[step/2:h:step, step/2:w:step].reshape(2,-1)
    fx, fy = flow[y,x].T
    # ...and draw each displacement as a short line segment
    lines = np.vstack([x, y, x+fx, y+fy]).T.reshape(-1, 2, 2)
    lines = np.int32(lines + 0.5)
    vis = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
    cv2.polylines(vis, lines, 0, (0, 0, 0))
    # for (x1, y1), (x2, y2) in lines:
    #     cv2.circle(vis, (x1, y1), 1, (0, 255, 0), -1)
    return vis

def draw_hsv(flow):
    h, w = flow.shape[:2]
    fx, fy = flow[:,:,0], flow[:,:,1]
    ang = np.arctan2(fy, fx) + np.pi
    v = np.sqrt(fx*fx+fy*fy)
    # hsv = np.zeros((h, w, 3), np.uint8)
    # hsv[...,0] = ang*(180/np.pi/2)
    # hsv[...,1] = 255
    # hsv[...,2] = np.minimum(v*4, 255)
    # bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
    # hue and saturation are zeroed, so the HSV->BGR conversion yields a grayscale
    # image: white where nothing moves, darker where the flow magnitude is high
    f = np.zeros((h, w, 3), np.uint8)
    f[...,0] = 0 #np.minimum(v*10, 255)
    f[...,1] = 0
    f[...,2] = 255 - np.minimum(v**2, 255) #ang*(180/np.pi/2)
    bgr = cv2.cvtColor(f, cv2.COLOR_HSV2BGR)
    return bgr
width, height = 640, 480
cam = video.create_capture("0:size="+str(width)+"x"+str(height))
# wait until the camera delivers frames at the requested size
while True:
    ret, prev = cam.read()
    prevgray = cv2.cvtColor(prev, cv2.COLOR_BGR2GRAY)
    if prevgray.shape == (height, width):
        break

while True:
    ret, img = cam.read()
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    print prevgray.shape, gray.shape
    flow = cv2.calcOpticalFlowFarneback(prevgray, gray, 0.5, 3, 15, 3, 5, 1.2, 0)
    prevgray = gray
    # cv2.imshow('flow', draw_flow(gray, flow))
    cv2.imshow('flow', draw_hsv(flow))
    ch = 0xFF & cv2.waitKey(5)
    if ch == 27:  # ESC quits
        break
cv2.destroyAllWindows()

@@ -0,0 +1,93 @@
#!/usr/bin/env python
from __future__ import print_function
import cv2, os, sys, time
import numpy as np
from argparse import ArgumentParser
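# Optical-flow recorder: computes dense Farneback flow between successive
# webcam frames, renders it with draw(), and writes the result to --output
# and/or shows it live with --show until --time seconds have elapsed or 'q'
# is pressed.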
def draw(flow):
    h, w = flow.shape[:2]
    fx, fy = flow[:,:,0], flow[:,:,1]
    ang = np.arctan2(fy, fx) + np.pi
    v = np.sqrt(fx*fx+fy*fy)
    # hsv = np.zeros((h, w, 3), np.uint8)
    # hsv[...,0] = ang*(180/np.pi/2)
    # hsv[...,1] = 255
    # hsv[...,2] = np.minimum(v*4, 255)
    # bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
    f = np.zeros((h, w, 3), np.uint8)
    f[...,0] = 0 #np.minimum(v*10, 255)
    f[...,1] = 0
    f[...,2] = 255 - np.minimum(v**2, 255) #ang*(180/np.pi/2)
    bgr = cv2.cvtColor(f, cv2.COLOR_HSV2BGR)
    return bgr
p = ArgumentParser("")
p.add_argument("--video", type=int, default=0, help="video, default: 0")
p.add_argument("--output", default=None, help="path to save movie, default: None (show live)")
p.add_argument("--width", type=int, default=640, help="pre-detect resize width")
p.add_argument("--height", type=int, default=480, help="pre-detect resize height")
p.add_argument("--fourcc", default="XVID", help="MJPG,mp4v,XVID")
p.add_argument("--framerate", type=float, default=25, help="output frame rate")
p.add_argument("--show", default=False, action="store_true")
p.add_argument("--time", type=float, default=None)
args = p.parse_args()
fourcc = None
cam = cv2.VideoCapture(args.video)
cam.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, args.width)
cam.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, args.height)
if args.output:
    try:
        fourcc = cv2.cv.CV_FOURCC(*args.fourcc)
    except AttributeError:
        fourcc = cv2.VideoWriter_fourcc(*args.fourcc)
    out = cv2.VideoWriter()
    out.open(args.output, fourcc, args.framerate, (args.width, args.height))
else:
    out = None

while True:
    ret, prev = cam.read()
    prevgray = cv2.cvtColor(prev, cv2.COLOR_BGR2GRAY)
    if prevgray.shape == (args.height, args.width):
        break

try:
    if args.time != None:
        start = time.time()
    while True:
        ret, frame = cam.read()
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        flow = cv2.calcOpticalFlowFarneback(prevgray, gray, 0.5, 3, 15, 3, 5, 1.2, 0)
        prevgray = gray
        frame = draw(flow)
        if out != None:
            out.write(frame)
        if args.show:
            cv2.imshow('display', frame)
            if cv2.waitKey(5) & 0xFF == ord('q'):
                break
        if args.time != None:
            elapsed = time.time() - start
            if (elapsed >= args.time):
                break
except KeyboardInterrupt:
    pass

print("\nCleaning up...")
if out:
    out.release()
if args.show:
    cv2.destroyAllWindows()

@@ -0,0 +1,68 @@
#!/usr/bin/env python
from __future__ import print_function
import cv2, os, sys, time
from argparse import ArgumentParser
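# Plain webcam recorder (presumably the scripts/simplerecord.py invoked by the
# record CGI above): writes raw camera frames to --output and/or shows them
# live with --show until --time seconds have elapsed or 'q' is pressed.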
p = ArgumentParser("")
p.add_argument("--video", type=int, default=0, help="video, default: 0")
p.add_argument("--output", default=None, help="path to save movie, default: None (show live)")
p.add_argument("--width", type=int, default=640, help="pre-detect resize width")
p.add_argument("--height", type=int, default=480, help="pre-detect resize height")
p.add_argument("--fourcc", default="XVID", help="MJPG,mp4v,XVID")
p.add_argument("--framerate", type=float, default=25, help="output frame rate")
p.add_argument("--show", default=False, action="store_true")
p.add_argument("--time", type=float, default=None)
args = p.parse_args()
fourcc = None
cam = cv2.VideoCapture(args.video)
cam.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, args.width)
cam.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, args.height)
if args.output:
    try:
        fourcc = cv2.cv.CV_FOURCC(*args.fourcc)
    except AttributeError:
        fourcc = cv2.VideoWriter_fourcc(*args.fourcc)
    out = cv2.VideoWriter()
    out.open(args.output, fourcc, args.framerate, (args.width, args.height))
else:
    out = None

try:
    if args.time != None:
        start = time.time()
    while True:
        ret, frame = cam.read()
        if out != None:
            out.write(frame)
        if args.show:
            cv2.imshow('display', frame)
            if cv2.waitKey(5) & 0xFF == ord('q'):
                break
        if args.time != None:
            elapsed = time.time() - start
            if (elapsed >= args.time):
                break
except KeyboardInterrupt:
    pass

print("\nCleaning up...")
if out:
    out.release()
if args.show:
    cv2.destroyAllWindows()