From 350c841dc4228c25d07cf7d9e6893e2e6fd17326 Mon Sep 17 00:00:00 2001 From: Mautisim Munir Date: Wed, 5 Oct 2022 14:57:41 +0500 Subject: [PATCH] added standalone posetracking gpu example --- .../ios/posetrackingsolutiongpu/AppDelegate.h | 21 ++ .../posetrackingsolutiongpu/AppDelegate.mm | 61 ++++++ .../AppIcon.appiconset/40_c_1x.png | Bin 0 -> 396 bytes .../AppIcon.appiconset/40_c_2x.png | Bin 0 -> 686 bytes .../AppIcon.appiconset/40_c_3x.png | Bin 0 -> 855 bytes .../AppIcon.appiconset/60_c_iphone_2x.png | Bin 0 -> 1071 bytes .../AppIcon.appiconset/60_c_iphone_3x.png | Bin 0 -> 1744 bytes .../AppIcon.appiconset/76_c_Ipad.png | Bin 0 -> 2655 bytes .../AppIcon.appiconset/76_c_Ipad_2x.png | Bin 0 -> 1180 bytes .../AppIcon.appiconset/Contents.json | 106 +++++++++ .../Assets.xcassets/Contents.json | 7 + .../ios/posetrackingsolutiongpu/BUILD | 41 +++- .../Base.lproj/LaunchScreen.storyboard | 25 +++ .../Base.lproj/Main.storyboard | 49 +++++ .../CommonViewController.h | 67 ++++++ .../CommonViewController.mm | 201 ++++++++++++++++++ .../ios/posetrackingsolutiongpu/Info.plist | 38 ++++ .../PoseTrackingViewController.h | 2 +- .../PoseTrackingViewController.mm | 12 +- .../ios/posetrackingsolutiongpu/main.m | 22 ++ 20 files changed, 649 insertions(+), 3 deletions(-) create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.h create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.mm create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/40_c_1x.png create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/40_c_2x.png create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/40_c_3x.png create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/60_c_iphone_2x.png create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/60_c_iphone_3x.png create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/76_c_Ipad.png create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/76_c_Ipad_2x.png create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/Contents.json create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/Contents.json create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj/LaunchScreen.storyboard create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj/Main.storyboard create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.h create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.mm create mode 100644 mediapipe/examples/ios/posetrackingsolutiongpu/main.m diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.h b/mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.h new file mode 100644 index 000000000..6b0377ef2 --- /dev/null +++ b/mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.h @@ -0,0 +1,21 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : UIResponder <UIApplicationDelegate>
+
+@property(strong, nonatomic) UIWindow *window;
+
+@end
diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.mm b/mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.mm
new file mode 100644
index 000000000..3e08df40f
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/AppDelegate.mm
@@ -0,0 +1,61 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "AppDelegate.h"
+
+#import "CommonViewController.h"
+
+@interface AppDelegate ()
+
+@end
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication *)application
+    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+  // Override point for customization after application launch.
+  return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {
+  // Sent when the application is about to move from active to inactive state. This can occur for
+  // certain types of temporary interruptions (such as an incoming phone call or SMS message) or
+  // when the user quits the application and it begins the transition to the background state. Use
+  // this method to pause ongoing tasks, disable timers, and invalidate graphics rendering
+  // callbacks. Games should use this method to pause the game.
+}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {
+  // Use this method to release shared resources, save user data, invalidate timers, and store
+  // enough application state information to restore your application to its current state in case
+  // it is terminated later. If your application supports background execution, this method is
+  // called instead of applicationWillTerminate: when the user quits.
+}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {
+  // Called as part of the transition from the background to the active state; here you can undo
+  // many of the changes made on entering the background.
+}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {
+  // Restart any tasks that were paused (or not yet started) while the application was inactive. If
+  // the application was previously in the background, optionally refresh the user interface.
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+  // Called when the application is about to terminate. Save data if appropriate. See also
+  // applicationDidEnterBackground:.
+} + +@end diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/40_c_1x.png b/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/40_c_1x.png new file mode 100644 index 0000000000000000000000000000000000000000..ed44f758e410c12f23ac9b943401f131d4b4b155 GIT binary patch literal 396 zcmeAS@N?(olHy`uVBq!ia0vp^8X(NU1|)m_?Z^dEoCO|{#S9GG!XV7ZFl&wkP>``W z$lZxy-8q?;Kn_c~qpu?a!^VE@KZ&eBIYmzw$B+ufw==A{4mpUp&cE-(a+Eb=VQX*F zW3ikqm)#0v#mu52cQBeO?2vE_P>i^t5pkn~QP941)`AIXK1St6Ef(xdhaXRzlk{$n zfufZE{3pqWo)k|}=n`@XY|=O=@aD59zP`&i>|A=ILK0^*6p9ZpwRkE^Lc!ywA>uNxmF`pBhf4rQ0d}IG^06zE;OZ z@QJ53w|mnr)y`wjwl@PsWH`HgmPX4gjY!eDEA%E(hdD#+G{dZ;d6&;JZ(&beB0cM< z(Ir`+xbocz3wlbNH_!O`{LHK8p0zDfYa3%PKdf3@q?WqwcX97D_t=cT;)SB zSknVm?rL2Y?)wMSFWkB%E>OcLy*q-7(eV+h&KFi62N4ld1+l0XJ^_N0IE??_oOAQx z|5G=eCG66^znL!_WU%}9{5F=$sT?e_uuIH%s%;Wk>sn_rtMp; zH&+(sX06ZPxq^S8!q=($Zs(+?uH1NW|CRgpXL45F+4AznD`&yot>tNIv&~*>+%hfa zN?OnF>hs{#kBHmZ!QaHX_S#M9GntswJ^4ZO`#jtERrk;HtD49Dc(wB0w^`YIQBT|+ z96OnMI`~J@^=&!nSC!0}R&UEbck^B3FS}g7J0Jfpp1D0c^x$=&c$fRkn~!xy>Cc}z zBSDC9p@RSy%gL+88-#COn|^1`f%!SREv!CCU!0bE%VKNxyIEp81#hg(dF`FYQl@qL zmy+6t{cC2KPP%zb+^}(D+SN6J!V=kGXQiN=TK*ox)oHwwIly7m@hZlG^@v}>nn=^^ zE6)zCTwo^d$GUarhP1M2E^E1$Pnvs`Wf_zRO;^%PNmmmDx&`Q(D^7KdGT}p^J!mKTr#?S sEKJ_3@&CQHgDOl;jS3BPgg&ebxsLQ0N;ilV*mgE literal 0 HcmV?d00001 diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/40_c_3x.png b/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/40_c_3x.png new file mode 100644 index 0000000000000000000000000000000000000000..c96e2f0aa3aeb73228740ffcc35b2d9e79e86453 GIT binary patch literal 855 zcmeAS@N?(olHy`uVBq!ia0vp^6(G#P1|%(0%q{^b&H|6fVg?393lL^>oo1K-6l5$8 za(7}_cTVOdki(Mh=Vn3EQ^-V@w@`E1AcbLZ-x)hKB0 z;n{Kg&YPHjbJr|A?q9ExIM>t3*kDfMLmr;=1PkWFk`m_*6tJ}sD|F}P7N*zF&o5=a zZhm>L_VoI{C-2o*c~<8?&#bSz^4BUkYW3fnOKu+Je);q6+bxDQeo%kB*m)}P$I>)y4QPFD}?j(+-o$I-W!M7yZBb^#S$!`NM}-!Xm}< zF9IxqL7~WOEg7VjlA!K?g1<>aEjS3d50JS{G7 z8-L30x^DRQqy72$lQTO%Ec zzdpY5zCE&F@s56;S!Ms)-x(LD6g{xGwmnfS$MW*CK0jWW!@tbVH2F`TQMK!$y63g+ zSLItGKWur(P!=Z~VZY~}!i^UU%Re>Umy+WCWi{D)s`#E=UmcwmCa{~_Z{?n+Z{@c$ zxbk4Xechy^i)}Bot2hP*}>=iogv)!ufU*x~=^-c#_Z+*Uc@4Z3zrnsHr z-5cYTYI!_-znpK43p{B)Z{LL9tp}oAzk5oHSij$3C2HQ?zVYg#y0yW()HQZ+PpwSN zjSYAp!iy2AA8dDY-#w?hysgRl%W2*0Q-Rx0t4m!hKE3jxi0=QQ(}$PHm-h5st+jDD zXmsmNQI?i|#g&~;uY|;XSOHYCa?gXn=hK3Lj$aHif+QAn)lis^jo^U!RTCLud@FZqq`J!51r?Wubujfu%fLvhkv^O1Gm{6c0A;ZN6 zVcuCQ?t4nm*B}dMMBuUvkgk|D;=YGY*6qzmQvUsQQD^U)>)mQ*o8t9n-AGu>Jm(Fo zae1zC^}#c-w@<_w6kj*nB!1@3R$g)b$TP9+kDY)b4|hbSw(opxa7TCXkvpZ9SZ&?Q zZd@MAXWle(H{ao-B4_WETIjJDm&^Kn1Bz^$xHsY1y4)1~89TG3Z*r%-yTv9g`yge1 zoXP9kC+haT(7JyAgk!<0J4I7p>${vNO$SOi{jFyFDz5x=SL_<`w|n=+b*3(hmfbC& zc|CUeP4Ds}C%(t;e|m8C_f7lbrf+Un`_*^V`2VY0RV(ipdHvn!_cyA)aIN3pY}MD_ z7ptAt*6r-rSH0|vuJV=rHy76#JFc_dl`u&>r=)>kJ^?D&@%#HlmjiW1f0^ZTF7}@k TSoIp1zZpDT{an^LB{Ts5GfVi= literal 0 HcmV?d00001 diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/60_c_iphone_3x.png b/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/60_c_iphone_3x.png new file mode 100644 index 0000000000000000000000000000000000000000..1e905ec27a7c714debee5305c43b83cdfdfc533e GIT binary patch literal 1744 zcmb7Fdpy%?7$0+)IYXkIT;`H=b`-IpV#hX$k?0WCH7tt0em{a8#Vn;|zy^ zjQ*-x(>bRotbYDF?UMVi15LPn!LN{zyQIB84B=r>9o2Mp3(Lgn4+jO$OE*ns&+L z8wBs@JyEky%%|p;r|UR>bH}P<+qx6nyiO@^1bZaY9!ua(BP+`6rjOF_VogO{G%LFF z>5LC5Wvhre+cytJQ=A_gOL^h?oq;|B- 
z;q9K46QfR~Yn)u#NERZBdA#lULFhEaKD_VtfqeM3(#3WsBFwWYcsUVpvV% zy7V0)4YP$d+0I_n0ji?W6eCOtaw*>Hh`vU-*Ds+u(AkkJce%wIkh6eX#T!^xUSOSp zr6f3xr(_mMV84tQ$$F2Rdae94du8wvz)zK&U$$U)P+}l)$1|DRqU#uR7IuTPmjHu^ zC|baDOI=Pi3No>a?aRg7Pdp?~`3^h1wsA;vC5!Os{J#KNkxh|4`b1!^VB=3 zIuGZj$5#&ALThocVew@jYdusRDjG#_A;crV?-9~BVx?RWG?m;m2sd9E7N?AP+COei zDh2|y7ExV1#@(gM6Wyb{6LKa$cu6Cq3wj6=UP=K!$DUvH<~g{gIburyJKk)4;k&?s zejuY<5tU@7SrIb)FO8a{bBI(Rp@*i?NLU)bX0pP*3MhSJj4&?Cx=NN-?X89MF+^M1 z?n2Vp6Gk|{K5#%Lgaj1K1%oxW^6k`WMMoDa0TGnwRfo@n9P$4YXk%%ch3ug~0d{kU zmz{qm1Rr_M_-m=ik9X3ww1sT(S3BC=9kg9$gxgKrAGL#E6jpCq2KdYS?9Wae)^CI_ z!a0>s$Ji zH#tj*P+&`%hmQG5vkf#`H7cCOx+HdA?F+3I=0atEvet-nP?d7m5_ zAkD7+5e#7MXMC>-teZaLl6u|v1>@$n3FkM5Eg7PAdR~ZcyHZOu&;Rk>Z2t@P=B-G-`>Hh$Wk0Jm7 literal 0 HcmV?d00001 diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/76_c_Ipad.png b/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/76_c_Ipad.png new file mode 100644 index 0000000000000000000000000000000000000000..8e40730507aa3cbca04db9372fbf41eb2514de06 GIT binary patch literal 2655 zcmaJ@4LFl~8-GN;KSBoh8L-D%m{a@Grx$oz`f4}?Vf8EdjdVciwa#K^@ zs0;vrnmgJV3rY{^R+I;yZ8-`Li0Famr03Sh9 z;xGV2!2w`22mlbd0H7ODAUxmzIx@&O_YhA{UKg%R zWKaWqWj1}$kIHOc#-F7Ca!Z&HP)ZjX+)~b>lr5H7j##|r3)-Vtc3GFwDhG7iK%o?Y z#xnsxMO(UM0B(UE0LZ#h4&qoiPY(o%PK6SJ=|nP=O^uMU00%Y#RHfAI8Seg6P-bZ*h1k@xB*HT0)aR%f=-ew5v3&PwBI0{&}+ zmB!Ts41+=j4_;c>mp)e7R^z>o(OGnG2pANSJB>wVfQPWe{#@R-5i4RzPZr@#VUxpg z&J-${ws?*zh zCCPVhm3*vZ`Fk} z7({^JWaPfSX1D`lT(L4OrRs{f;+H70YhO|zr)&JoS^0TA>bv6cC+lbG?#2ldqmyD( z1K1iqGnk}M{qWSw8fn)@+-tQSr5v|W-Im29#`Rr4IAo*RvJM!(#olq4%(;X!77b96 zpT}(sbpDAShrX_gE?rkk49mOvU-eJZldut6q7O-6?Ew z8@oD^^7~c|=zjhD*z&CurXe*yK(*m*uxGYkZ`bT6x0%h_ihpl7JB-0Z?={Hr8;3tP ztgm=uPOv`NN>`1~(G*!EG-!FP(Y`)!ubO@u-bxDUz~7VtEM<&>3$-Zf>0zEf1-UKmT8`i}Pm+2-Pry*X5g^aGL0*}^V^5$^Yn zihq_k#yNV_05M1uIc*N+`Qr7z==cxSc)2LCKKPj6>Z20YWqr++z{OK(`=2Nw#yhpIs<~c@69{L5T=5@iAV3_UnJy>QR*xV?Yc*UUGyx% zFGla>Hrti+1Xp|o^Y(-L4)q`^`gg_YGVgRBy}<9{!bkgwRd%j4tjmx=x>mri@39>F z)_rFB*)j9%*bcdZc@=?0N1gx8VYjR_xGobJsx%-p(v1{AN3d0gtqH!U^z+tZiCq7i z4~zduJz~~;Yb-0Wsx4pVGRNy&aeVEn6)w9NCagyZY4^O^r_Ftn@s<1E{K=TxG=cv6wTT#1yN>5F8KD;kuLNe3Ny{ArieOER9RgW%`qnwZtp0OjUK+5r52S1db1Fy-VpH2S%<=NNlK-edLX)*yAm~FK# T8cyd(f0f)_yqv4{2A%o`#qTX2 literal 0 HcmV?d00001 diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/76_c_Ipad_2x.png b/mediapipe/examples/ios/posetrackingsolutiongpu/Assets.xcassets/AppIcon.appiconset/76_c_Ipad_2x.png new file mode 100644 index 0000000000000000000000000000000000000000..d28effc397a965eb21f1882f8fa3d9efec1614fc GIT binary patch literal 1180 zcmeAS@N?(olHy`uVBq!ia0vp^GeDSw4M<8HQcwg^oCO|{#S9E$svykh8Km+7D9BhG zoxS<87k76wr@+n042n$6B8`q+2NYZrCMX$r2udvB;0a=3 z)1*-N+p;r0a+1E6A17z7`e9l3;=#A~^Xl*2`L}cTH>=#5ztc`7{5_cU^b>2z;oskv z-WFbdsrRyanQh*`r@y{2@BH~WJn8SNYvCuA45~lK#aYZh*T+*{y2mzJ->&M?*X9$y z>NZvIrky{NCvTH{{MP*qb*8Vice0zs?df|_a_fHQ-?L9HUhF%*ai9M81E*{*FYE6u zwQ2iWyOX_aS$pvR|8M4syq+E%f7W(Grx1U_?_8_b-}mp6l3qJOX~U#@({9|@^0ws# z$C@Qed-+$!v$XY1yZ=@GhR?>wa zy6joxpUu-1H*noZulsg0-aOsNx;R#=@9(6O?H99d{@v{t8<}E0f1k(qu7*6(-S4+K z-nw^qllQ}-k7)@jEB2;MzoMqNp)1lSJUmSJ-$Nlz%<#X#zB=OWGqI~2Ym|1M)x9;P z|8ItjPf>U#6os5op@gj^D0_{C@z6a8yEEdTnE_ zYW+jz>91^`gfp?fGLEQD+UafI%K-hHODcDK#BvWWYAl?pNS0*CJFPx@MG`lxQ=&b9l)WUOyL{q@#(Vpi?e zg(jsx{ffRl{yx9t|JmOq2^ku5t!*~F-uUgzo^6|MUA~j#_xil(>-hJDf8Rg)IyEXY z(QtM7>22Sa%!y9 + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj/Main.storyboard 
b/mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj/Main.storyboard
new file mode 100644
index 000000000..fcf71c0e2
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj/Main.storyboard
@@ -0,0 +1,49 @@
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.h b/mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.h
new file mode 100644
index 000000000..d7cb1121a
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.h
@@ -0,0 +1,67 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+#import "mediapipe/objc/MPPCameraInputSource.h"
+#import "mediapipe/objc/MPPGraph.h"
+#import "mediapipe/objc/MPPLayerRenderer.h"
+#import "mediapipe/objc/MPPPlayerInputSource.h"
+#import "mediapipe/objc/MPPTimestampConverter.h"
+
+typedef NS_ENUM(NSInteger, MediaPipeDemoSourceMode) {
+  MediaPipeDemoSourceCamera,
+  MediaPipeDemoSourceVideo
+};
+
+@interface CommonViewController : UIViewController <MPPGraphDelegate, MPPInputSourceDelegate>
+
+// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
+// viewWillAppear: and sent video frames on videoQueue.
+@property(nonatomic) MPPGraph* mediapipeGraph;
+
+// Handles camera access via the AVCaptureSession API.
+@property(nonatomic) MPPCameraInputSource* cameraSource;
+
+// Provides data from a video file.
+@property(nonatomic) MPPPlayerInputSource* videoSource;
+
+// Converts CMTime timestamps to MediaPipe timestamps.
+@property(nonatomic) MPPTimestampConverter* timestampConverter;
+
+// The data source for the demo.
+@property(nonatomic) MediaPipeDemoSourceMode sourceMode;
+
+// Informs the user when the camera is unavailable.
+@property(nonatomic) IBOutlet UILabel* noCameraLabel;
+
+// Displays the camera preview frames.
+@property(strong, nonatomic) IBOutlet UIView* liveView;
+
+// Renders frames in a layer.
+@property(nonatomic) MPPLayerRenderer* renderer;
+
+// Processes camera frames on this queue.
+@property(nonatomic) dispatch_queue_t videoQueue;
+
+// Graph name.
+@property(nonatomic) NSString* graphName;
+
+// Graph input stream.
+@property(nonatomic) const char* graphInputStream;
+
+// Graph output stream.
+@property(nonatomic) const char* graphOutputStream;
+
+@end
diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.mm b/mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.mm
new file mode 100644
index 000000000..f6c47eacf
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/CommonViewController.mm
@@ -0,0 +1,201 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import "CommonViewController.h" + +static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue"; + +@implementation CommonViewController + +// This provides a hook to replace the basic ViewController with a subclass when it's created from a +// storyboard, without having to change the storyboard itself. ++ (instancetype)allocWithZone:(struct _NSZone*)zone { + NSString* subclassName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"MainViewController"]; + if (subclassName.length > 0) { + Class customClass = NSClassFromString(subclassName); + Class baseClass = [CommonViewController class]; + NSAssert([customClass isSubclassOfClass:baseClass], @"%@ must be a subclass of %@", customClass, + baseClass); + if (self == baseClass) return [customClass allocWithZone:zone]; + } + return [super allocWithZone:zone]; +} + +#pragma mark - Cleanup methods + +- (void)dealloc { + self.mediapipeGraph.delegate = nil; + [self.mediapipeGraph cancel]; + // Ignore errors since we're cleaning up. + [self.mediapipeGraph closeAllInputStreamsWithError:nil]; + [self.mediapipeGraph waitUntilDoneWithError:nil]; +} + +#pragma mark - MediaPipe graph methods + ++ (MPPGraph*)loadGraphFromResource:(NSString*)resource { + // Load the graph config resource. + NSError* configLoadError = nil; + NSBundle* bundle = [NSBundle bundleForClass:[self class]]; + if (!resource || resource.length == 0) { + return nil; + } + NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"]; + NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError]; + if (!data) { + NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError); + return nil; + } + + // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object. + mediapipe::CalculatorGraphConfig config; + config.ParseFromArray(data.bytes, data.length); + + // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object. 
+ MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config]; + return newGraph; +} + +#pragma mark - UIViewController methods + +- (void)viewDidLoad { + [super viewDidLoad]; + + self.renderer = [[MPPLayerRenderer alloc] init]; + self.renderer.layer.frame = self.liveView.layer.bounds; + [self.liveView.layer addSublayer:self.renderer.layer]; + self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop; + + self.timestampConverter = [[MPPTimestampConverter alloc] init]; + + dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class( + DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0); + self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute); + + self.graphName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphName"]; + self.graphInputStream = + [[[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphInputStream"] UTF8String]; + self.graphOutputStream = + [[[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphOutputStream"] UTF8String]; + + self.mediapipeGraph = [[self class] loadGraphFromResource:self.graphName]; + [self.mediapipeGraph addFrameOutputStream:self.graphOutputStream + outputPacketType:MPPPacketTypePixelBuffer]; + + self.mediapipeGraph.delegate = self; +} + +// In this application, there is only one ViewController which has no navigation to other view +// controllers, and there is only one View with live display showing the result of running the +// MediaPipe graph on the live video feed. If more view controllers are needed later, the graph +// setup/teardown and camera start/stop logic should be updated appropriately in response to the +// appearance/disappearance of this ViewController, as viewWillAppear: can be invoked multiple times +// depending on the application navigation flow in that case. +- (void)viewWillAppear:(BOOL)animated { + [super viewWillAppear:animated]; + + switch (self.sourceMode) { + case MediaPipeDemoSourceVideo: { + NSString* videoName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"VideoName"]; + AVAsset* video = [AVAsset assetWithURL:[[NSBundle mainBundle] URLForResource:videoName + withExtension:@"mov"]]; + self.videoSource = [[MPPPlayerInputSource alloc] initWithAVAsset:video]; + [self.videoSource setDelegate:self queue:self.videoQueue]; + dispatch_async(self.videoQueue, ^{ + [self.videoSource start]; + }); + break; + } + case MediaPipeDemoSourceCamera: { + self.cameraSource = [[MPPCameraInputSource alloc] init]; + [self.cameraSource setDelegate:self queue:self.videoQueue]; + self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh; + + NSString* cameraPosition = + [[NSBundle mainBundle] objectForInfoDictionaryKey:@"CameraPosition"]; + if (cameraPosition.length > 0 && [cameraPosition isEqualToString:@"back"]) { + self.cameraSource.cameraPosition = AVCaptureDevicePositionBack; + } else { + self.cameraSource.cameraPosition = AVCaptureDevicePositionFront; + // When using the front camera, mirror the input for a more natural look. + _cameraSource.videoMirrored = YES; + } + + // The frame's native format is rotated with respect to the portrait orientation. + _cameraSource.orientation = AVCaptureVideoOrientationPortrait; + + [self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) { + if (granted) { + dispatch_async(dispatch_get_main_queue(), ^{ + self.noCameraLabel.hidden = YES; + }); + [self startGraphAndCamera]; + } + }]; + + break; + } + } +} + +- (void)startGraphAndCamera { + // Start running self.mediapipeGraph. 
+  NSError* error;
+  if (![self.mediapipeGraph startWithError:&error]) {
+    NSLog(@"Failed to start graph: %@", error);
+  }
+  else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
+    NSLog(@"Failed to complete graph initial run: %@", error);
+  }
+
+  // Start fetching frames from the camera.
+  dispatch_async(self.videoQueue, ^{
+    [self.cameraSource start];
+  });
+}
+
+#pragma mark - MPPInputSourceDelegate methods
+
+// Must be invoked on self.videoQueue.
+- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
+                timestamp:(CMTime)timestamp
+               fromSource:(MPPInputSource*)source {
+  if (source != self.cameraSource && source != self.videoSource) {
+    NSLog(@"Unknown source: %@", source);
+    return;
+  }
+
+  [self.mediapipeGraph sendPixelBuffer:imageBuffer
+                            intoStream:self.graphInputStream
+                            packetType:MPPPacketTypePixelBuffer
+                             timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
+}
+
+#pragma mark - MPPGraphDelegate methods
+
+// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
+- (void)mediapipeGraph:(MPPGraph*)graph
+    didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
+              fromStream:(const std::string&)streamName {
+  if (streamName == self.graphOutputStream) {
+    // Display the captured image on the screen.
+    CVPixelBufferRetain(pixelBuffer);
+    dispatch_async(dispatch_get_main_queue(), ^{
+      [self.renderer renderPixelBuffer:pixelBuffer];
+      CVPixelBufferRelease(pixelBuffer);
+    });
+  }
+}
+
+@end
diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/Info.plist b/mediapipe/examples/ios/posetrackingsolutiongpu/Info.plist
index 71e2e429e..35d4990ac 100644
--- a/mediapipe/examples/ios/posetrackingsolutiongpu/Info.plist
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/Info.plist
@@ -2,6 +2,44 @@
 <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 <plist version="1.0">
 <dict>
+
+	<key>NSCameraUsageDescription</key>
+	<string>This app uses the camera to demonstrate live video processing.</string>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>en</string>
+	<key>CFBundleExecutable</key>
+	<string>$(EXECUTABLE_NAME)</string>
+	<key>CFBundleIdentifier</key>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>$(PRODUCT_NAME)</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleVersion</key>
+	<string>1</string>
+	<key>LSRequiresIPhoneOS</key>
+	<true/>
+	<key>UILaunchStoryboardName</key>
+	<string>LaunchScreen</string>
+	<key>UIMainStoryboardFile</key>
+	<string>Main</string>
+	<key>UIRequiredDeviceCapabilities</key>
+	<array>
+		<string>armv7</string>
+	</array>
+	<key>UISupportedInterfaceOrientations</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+	</array>
+	<key>UISupportedInterfaceOrientations~ipad</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+	</array>
+
 	<key>CameraPosition</key>
 	<string>back</string>
 	<key>MainViewController</key>
diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.h b/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.h
index f5dc4674a..3bcfa7391 100644
--- a/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.h
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.h
@@ -14,7 +14,7 @@
 
 #import <UIKit/UIKit.h>
 
-#import "mediapipe/examples/ios/common/CommonViewController.h"
+#import "CommonViewController.h"
 
 @interface PoseTrackingViewController : CommonViewController
 
diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm b/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm
index 0f082031c..c80f19283 100644
--- a/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm
@@ -15,6 +15,7 @@
 #import "PoseTrackingViewController.h"
 
 #include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.h"
 
 static const char* kLandmarksOutputStream = "pose_landmarks";
 
@@ -23,10 +24,19 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
 #pragma mark - UIViewController methods
 
 - (void)viewDidLoad {
+
+
   [super viewDidLoad];
-
+  PoseTrackingOptions* options = [[PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0];
   [self.mediapipeGraph addFrameOutputStream:kLandmarksOutputStream
                             outputPacketType:MPPPacketTypeRaw];
+  [self.mediapipeGraph addFrameOutputStream:"throttled_input_video"
+                           outputPacketType:MPPPacketTypePixelBuffer];
+  if (options.showLandmarks) {
+    self.graphOutputStream = "output_video";
+  } else {
+    self.graphOutputStream = "throttled_input_video";
+  }
 }
 
 #pragma mark - MPPGraphDelegate methods
diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/main.m b/mediapipe/examples/ios/posetrackingsolutiongpu/main.m
new file mode 100644
index 000000000..7ffe5ea5d
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolutiongpu/main.m
@@ -0,0 +1,22 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char * argv[]) {
+  @autoreleasepool {
+    return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
+  }
+}
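
Note on configuration: CommonViewController is driven entirely by Info.plist lookups. allocWithZone: swaps in the subclass named under the MainViewController key, and viewDidLoad reads GraphName, GraphInputStream, and GraphOutputStream to load the .binarypb graph and wire its streams. A minimal sketch of the entries this example would need is shown below; the key names come from the code above, while the concrete values (a pose_tracking_gpu graph with an input_video input stream) are assumptions inferred from the streams referenced in PoseTrackingViewController.mm, and in the real target they are typically supplied by the BUILD rule rather than written by hand.

    <!-- Hypothetical Info.plist fragment; values are illustrative assumptions. -->
    <key>MainViewController</key>
    <string>PoseTrackingViewController</string>
    <key>GraphName</key>
    <string>pose_tracking_gpu</string>      <!-- resolved to pose_tracking_gpu.binarypb -->
    <key>GraphInputStream</key>
    <string>input_video</string>
    <key>GraphOutputStream</key>
    <string>output_video</string>           <!-- PoseTrackingViewController overrides this at runtime -->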
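
Note on landmarks: viewDidLoad above registers kLandmarksOutputStream with MPPPacketTypeRaw, so pose landmarks arrive through MPPGraphDelegate's packet callback rather than the pixel-buffer callback shown in CommonViewController.mm. Below is a sketch of how a subclass might consume them, assuming the pose_landmarks stream carries a mediapipe::NormalizedLandmarkList (the payload used by the pose tracking graph); the logging body is illustrative, not part of this patch.

    // Invoked on a MediaPipe worker thread; keep the work here light.
    - (void)mediapipeGraph:(MPPGraph*)graph
           didOutputPacket:(const mediapipe::Packet&)packet
                fromStream:(const std::string&)streamName {
      if (streamName == kLandmarksOutputStream) {
        if (packet.IsEmpty()) return;  // No pose detected in this frame.
        const auto& landmarks = packet.Get<mediapipe::NormalizedLandmarkList>();
        NSLog(@"[ts=%lld] received %d pose landmarks", packet.Timestamp().Value(),
              landmarks.landmark_size());
      }
    }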