From b316732f2c8cf55810b9cc63421104340a446e57 Mon Sep 17 00:00:00 2001
From: Franc
Date: Fri, 24 Mar 2017 10:40:00 +0100
Subject: [PATCH] commit to pull

---
 floppies/Kimmy_Rest/.DS_Store        | Bin 6148 -> 6148 bytes
 floppies/claudia/fine-line/.DS_Store | Bin 6148 -> 6148 bytes
 floppies/claudia/noweb/.DS_Store     | Bin 6148 -> 6148 bytes
 floppies/franc/.DS_Store             | Bin 6148 -> 6148 bytes
 floppies/franc/noweb/index.html      | 652 ++++++++++++++++++++++++++-
 5 files changed, 651 insertions(+), 1 deletion(-)

diff --git a/floppies/Kimmy_Rest/.DS_Store b/floppies/Kimmy_Rest/.DS_Store
index 57c3d4f062a9913dfaf9117e2a5225a2023134d2..cf5fa4ad3e4b03439f5c4dffb4c98f7d042363a0 100644
GIT binary patch (binary delta omitted)

diff --git a/floppies/claudia/fine-line/.DS_Store b/floppies/claudia/fine-line/.DS_Store
index cf5da4be64d59468a80c61369eaec58001139e19..f2c5ddace2ddcbf94b3b7d2114861d96ddd3aed3 100644
GIT binary patch (binary delta omitted)

diff --git a/floppies/claudia/noweb/.DS_Store b/floppies/claudia/noweb/.DS_Store
index cdfa72dd5ce3ababe05cc338f9d1e210d1d03ab1..0079a00c2f7ec6831b9c023fab31e339c12b5cbf 100644
GIT binary patch (binary delta omitted)

diff --git a/floppies/franc/.DS_Store b/floppies/franc/.DS_Store
index 5008ddfcf53c02e82d7eee2e57c38e5672ef89f6..0d7ac68d3a6407abca9bcf2cf46e3c4b2d3c939f 100644
GIT binary patch (binary delta omitted)

diff --git a/floppies/franc/noweb/index.html b/floppies/franc/noweb/index.html
+Pushing the Score
+
+PUSHING THE SCORE
+
+www.issue.xpub.nl/02/  www.deplayer.nl/
+
+CONCEPT
+
+BODY
+
+SAMPLES
+
+Technology is providing us with new ways to shape our perception of space, while at the same time transforming our bodies into gadgets. This is not only changing our spatial awareness but also extending our senses beyond their given nature. Moreover, control systems that regulate and command specific behaviours can be very practical tools for improving physical functions or translating their data. This experiment, for instance, employs "Optical Flow", which detects the motion of image objects between frames, and "Open Sound Control (OSC)", which enables data to be exchanged and formatted between different programs, in this case from Python to Puredata. Although the possibilities for overcoming human physical or cognitive limitations by plugging a body into an electronic or mechanical device are still largely hypothetical and might extend beyond our imagination, technology is continuously turning the abstract or fictional conception of "cybernetics" into more tangible evidence. The communication between automated and living systems keeps evolving and upgrading, giving rise to ever more sophisticated engineered tools that might enable us to increase our knowledge, morphing our perception through deeper experiences. In this experiment, the potential for controlling data through motion in space, independently of physical contact, opens up new creative and pragmatic alternatives for facing both technological and communication constraints.
+
+This body analyses human motion in space and detects it using "Optical Flow" in Python, through a series of pre-designed multidirectional interpreters. These interpreters are made up of points (intersections) forming a grid that movement crosses. Motion is detected in the form of numeric values, which are automatically transmitted and formatted into a graphic array in Puredata. This array arranges the values and generates a polygonal waveform from the received coordinates (x values range from 0 to 10, y values from -1 to 1). The array drives an "oscillator" object, which defines the frequency of the tone, together with a "metro" object, which spans its duration in milliseconds, consequently iterating the audio (re-writing it on the display). The intersections and the graphic array (together with the entire Puredata patch) become an interactive notation system, while people become the instrument/tool that triggers it.
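+As a minimal illustration of the pipeline just described, the sketch below takes one grid intersection, normalizes its pixel coordinates to the x 0..10 / y -1..1 ranges mentioned above, and sends them to Puredata as an OSC "/dot" message. It is only a sketch: it reuses the pyOSC calls, grid geometry and port (9001) of the full script further down, and the sample coordinates are made up.
+
+# Minimal Python 2 sketch of the Python -> Puredata link (assumes the pyOSC
+# library, imported as OSC, and a Pd patch listening on UDP port 9001).
+import OSC
+
+client = OSC.OSCClient()
+client.connect(('127.0.0.1', 9001))  # same host/port as the full script below
+
+def send_dot(x1, y1):
+    # Map grid-intersection pixels (x1 in 38..118, y1 in 38..86, every 8 px
+    # on a 160x120 frame) to the ranges the Pd array expects; integer
+    # division is intentional under Python 2.
+    normx = x1 / 8 - 4                # 0 .. 10
+    normy = 1 - ((y1 / 8 - 4) / 3.0)  # 1 .. -1
+    msg = OSC.OSCMessage()
+    msg.setAddress("/dot")
+    msg.append(normx)
+    msg.append(normy)
+    client.send(msg)
+
+send_dot(78, 62)  # a central intersection -> sends "/dot 5 0.0"
+
+On the Puredata side, each "/dot" message updates one point of the graphic array, so continuous motion keeps redrawing the waveform that the oscillator reads.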
+
+[Diagram: system overview]
+X / Y INTERSECTIONS (Y range = -1 to 1, X range = 0 to 10) -> Waveform
+SYSTEM: 1 PYTHON, 2 OPTICAL FLOW, 2 INTERSECTIONS, 3 OSC MSG SEND, 5 PUREDATA EXTENDED, 6 OSC MSG RECEIVE, 7 GRAPHIC ARRAY, 8 OSC / METRO
+
+OPTICAL FLOW
+
+#!/usr/bin/env python
+import numpy as np
+import cv2, math
+import video  # helper module from the OpenCV samples (provides create_capture)
+
+help_message = '''USAGE: opt_flow.py [<video_source>]
+Keys:
+ 1 - toggle HSV flow visualization
+ 2 - toggle glitch
+'''
+
+# def draw_flow(img, flow, step=4): # size grid
+#     h, w = img.shape[:2]
+#     y, x = np.mgrid[step/2:h:step, step/2:w:step].reshape(2,-1)
+#     fx, fy = flow[y,x].T
+#     lines = np.vstack([x, y, x+fx, y+fy]).T.reshape(-1, 2, 2)
+#     lines = np.int32(lines + 0.5)
+#     vis = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
+#     cv2.polylines(vis, lines, 0, (0, 0, 255)) # BGR
+#     for (x1, y1), (x2, y2) in lines:
+#         cv2.circle(vis, (x1, y1), 1, (0, 255, 0), -1)
+#     return vis
+
+import OSC
+# from pythonosc import osc_message_builder
+# from pythonosc import udp_client
+import time
+
+def send_flow0(img, flow, step=4):  # step = grid spacing in pixels
+    h, w = img.shape[:2]
+    # sample the flow field on a regular grid
+    y, x = np.mgrid[step/2:h:step, step/2:w:step].reshape(2, -1)
+    fx, fy = flow[y, x].T
+    # print "fx, fy", fx, fy
+    lines = np.vstack([x, y, x+fx, y+fy]).T.reshape(-1, 2, 2)
+    lines = np.int32(lines + 0.5)
+    vis = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
+    flines = []
+    for (x1, y1), (x2, y2) in lines:
+        # keep only the predefined intersections, spaced every 8 px
+        # (x1 in 38..118, y1 in 38..86)
+        if x1 in range(38, 120, 8) and y1 in range(38, 90, 8):
+            flines.append(((x1, y1), (x2, y2)))
+            # normalize pixel coordinates to x 0..10, y -1..1
+            normx = x1 / 8 - 4
+            normy = 1 - ((y1 / 8 - 4) / 3.0)
+            dx = x2 - x1
+            dy = y2 - y1
+            m = int(math.sqrt((dx*dx) + (dy*dy)))  # magnitude of the flow vector
+            if m > 2:  # send only when the motion is strong enough
+                print ("dot", (normx, normy))
+                msg = OSC.OSCMessage()
+                msg.setAddress("/dot")
+                # msg.append(dx)
+                # msg.append(dy)
+                # msg.append(m)
+                msg.append(normx)
+                msg.append(normy)
+                client.send(msg)
+                # client.send_message("/franc", m)
+    flines = np.int32(flines)
+    cv2.polylines(vis, flines, 0, (0, 40, 255))  # BGR
+    for (x1, y1), (x2, y2) in flines:
+        cv2.circle(vis, (x1, y1), 1, (0, 255, 0), -1)
+    return vis
+
+# cv2.rectangle(img, pt1, pt2, color[, thickness[, lineType[, shift]]])
+
+if __name__ == '__main__':
+    import sys
+    print help_message
+    try:
+        fn = sys.argv[1]
+    except:
+        fn = 0
+
+    # init OSC and connect to Pd:
+    # first argument is the IP of the host, second argument is the port to use
+    client = OSC.OSCClient()
+    client.connect(('127.0.0.1', 9001))
+    # client = udp_client.SimpleUDPClient("127.0.0.1", 9001)
+
+    # connect camera
+    # cam = video.create_capture(fn)
video.create_capture("0:size=160x120") #canvas size in pixels ret, prev = cam.read() prevgray = cv2.cvtColor(prev, cv2.COLOR_BGR2GRAY) cur_glitch = prev.copy() while True: # print "GRAB FRAME" ret, img = cam.read() gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) flow = cv2.calcOpticalFlowFarneback(prevgray, gray, 0.5, 3, 15, 3, 5, 1.2, 0) prevgray = gray cv2.imshow('flow', send_flow0(gray, flow)) ch = 0xFF & cv2.waitKey(5) if ch == 27: break cv2.destroyAllWindows() + + + By exploring the connection between motion and sound, experiments have been performed through different software and tools, which has strengthen substantially the following additional material in this project. For instance, Kinect sensor and Synapse, which receives input data from Kinect and sends it out to Ableton or Max MSP, have been tested out. Similarly, motion detection was together explored with “color detection” in Puredata, which brought up more interesting alternatives. Sound recording and feedback loop was further tested with this method, though mechanically it was hardly accurate. Finally with “Optical Flow”, this work was reconfigured with a wider sense for interacting with data. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +