Skip to content

Commit

Permalink
- updated cognitive service URLS
Browse files Browse the repository at this point in the history
- finally: storyboards in playground books
  • Loading branch information
codePrincess committed Aug 31, 2017
1 parent 823a907 commit a13a709
Show file tree
Hide file tree
Showing 48 changed files with 1,031 additions and 19 deletions.
Expand Up @@ -142,7 +142,7 @@ var canvasView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 630))
//var canvasView = UIView(frame: CGRect(x: 0, y: 0, width: 1024, height: 630))

myCanvas.frame = canvasView.frame
canvasView.addSubview((myCanvas))
canvasView.addSubview(myCanvas)

PlaygroundPage.current.liveView = canvasView

Expand Down
Expand Up @@ -162,11 +162,14 @@ enum CognitiveServicesKeys {
/// Caseless enum of various configuration parameters.
/// See https://dev.projectoxford.ai/docs/services/56f91f2d778daf23d8ec6739/operations/56f91f2e778daf14a499e1fa for details
enum CognitiveServicesConfiguration {
static let AnalyzeURL = "https://api.projectoxford.ai/vision/v1.0/analyze"

static let HandwrittenOcrURL = "https://westeurope.api.cognitive.microsoft.com/vision/v1.0/recognizeText"
static let HandwrittenResultURL = "https://westeurope.api.cognitive.microsoft.com/vision/v1.0/textOperations/"

static let AnalyzeURL = "https://westeurope.api.cognitive.microsoft.com/vision/v1.0/analyze"
static let EmotionURL = "https://westus.api.cognitive.microsoft.com/emotion/v1.0/recognize"
static let FaceDetectURL = "https://api.projectoxford.ai/face/v1.0/detect"
static let FaceDetectURL = "https://westeurope.api.cognitive.microsoft.com/face/v1.0/detect"

static let JPEGCompressionQuality = 0.9 as CGFloat
static let RequiredConfidence = 0.85
}
Expand Down
2 changes: 1 addition & 1 deletion GreatStuffWithThePencil.playground/contents.xcplayground
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<playground version='6.0' target-platform='ios' display-mode='rendered'>
<playground version='6.0' target-platform='ios' display-mode='raw'>
<pages>
<page name='Get Started'/>
<page name='Smooth Doodeling'/>
Expand Down
Expand Up @@ -15,15 +15,15 @@ guard #available(iOS 9, OSX 10.11, *) else {
fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.")
}

let myView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 600))
let myView = UIView(frame: CGRect(x: 0, y: 0, width: 600, height: 900))

let preview = UIImageView(frame: myView.bounds)
//#-end-hidden-code
/*:
* experiment:
Choose your preferred image right here or take a new one
*/
preview.image = /*#-editable-code*/#imageLiteral(resourceName: "Jan.png")/*#-end-editable-code*/
preview.image = /*#-editable-code*/#imageLiteral(resourceName: "containers.png")/*#-end-editable-code*/
//#-hidden-code
preview.contentMode = .scaleAspectFit

Expand Down Expand Up @@ -71,7 +71,7 @@ func setTagsAsDescription (_ tags : [String]?) {
* experiment:
Every part of the description of the picture will be returned with a certain confidence. A good value is 0.85 for nicely fitting results. But go ahead and play around with this value and see what funky descriptions the "computer" may come up with
*/
showTagsForImage(preview, /*#-editable-code*/0.1/*#-end-editable-code*/)
showTagsForImage(preview, /*#-editable-code*/0.3/*#-end-editable-code*/)
//#-hidden-code
PlaygroundPage.current.liveView = myView
//#-end-hidden-code
Expand Down
Expand Up @@ -59,7 +59,7 @@ class MyLandmarkView : UIView {
}
}

let myView = UIView(frame: CGRect(x: 0, y: 0, width: 430, height: 620))
let myView = UIView(frame: CGRect(x: 0, y: 0, width: 600, height: 900))

let preview = UIImageView(frame: myView.bounds)
let landmarkView = MyLandmarkView(frame: myView.bounds)
Expand All @@ -70,7 +70,7 @@ landmarkView.backgroundColor = .clear
* experiment:
Choose your preferred image right here or take a new one. We tell the API that we'd like to know about different features of the face like age, gender, facialHair and glasses. Moreover we ask for a unique face identifier and facial landmarks. The face identifier can be used to later identify the person. The facial landmarks tell us things like where the eyes, the pupils, the nose and the mouth are and let us know about their dimensions.
*/
preview.image = /*#-editable-code*/#imageLiteral(resourceName: "Photo on 26.06.17 at 09.21.jpg")/*#-end-editable-code*/
preview.image = /*#-editable-code*/#imageLiteral(resourceName: "Les.jpg")/*#-end-editable-code*/

//#-hidden-code
preview.contentMode = .scaleAspectFit
Expand Down
Expand Up @@ -19,7 +19,7 @@ guard #available(iOS 9, OSX 10.11, *) else {
//#-end-hidden-code

//#-hidden-code
let myView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 600))
let myView = UIView(frame: CGRect(x: 0, y: 0, width: 600, height: 900))

let preview = UIImageView(frame: myView.bounds)
//#-end-hidden-code
Expand Down
Expand Up @@ -157,9 +157,9 @@ enum CognitiveServicesKeys {
/// Caseless enum of various configuration parameters.
/// See https://dev.projectoxford.ai/docs/services/56f91f2d778daf23d8ec6739/operations/56f91f2e778daf14a499e1fa for details
enum CognitiveServicesConfiguration {
static let AnalyzeURL = "https://api.projectoxford.ai/vision/v1.0/analyze"
static let EmotionURL = "https://api.projectoxford.ai/emotion/v1.0/recognize"
static let FaceDetectURL = "https://api.projectoxford.ai/face/v1.0/detect"
static let AnalyzeURL = "https://westeurope.api.cognitive.microsoft.com/vision/v1.0/analyze"
static let EmotionURL = "https://westus.api.cognitive.microsoft.com/emotion/v1.0/recognize"
static let FaceDetectURL = "https://westeurope.api.cognitive.microsoft.com/face/v1.0/detect"
static let JPEGCompressionQuality = 0.9 as CGFloat
static let RequiredConfidence = 0.85
}
Expand Down
Expand Up @@ -157,9 +157,9 @@ enum CognitiveServicesKeys {
/// Caseless enum of various configuration parameters.
/// See https://dev.projectoxford.ai/docs/services/56f91f2d778daf23d8ec6739/operations/56f91f2e778daf14a499e1fa for details
enum CognitiveServicesConfiguration {
static let AnalyzeURL = "https://api.projectoxford.ai/vision/v1.0/analyze"
static let EmotionURL = "https://api.projectoxford.ai/emotion/v1.0/recognize"
static let FaceDetectURL = "https://api.projectoxford.ai/face/v1.0/detect"
static let AnalyzeURL = "https://westeurope.api.cognitive.microsoft.com/vision/v1.0/analyze"
static let EmotionURL = "https://westus.api.cognitive.microsoft.com/emotion/v1.0/recognize"
static let FaceDetectURL = "https://westeurope.api.cognitive.microsoft.com/face/v1.0/detect"
static let JPEGCompressionQuality = 0.9 as CGFloat
static let RequiredConfidence = 0.85
}
Expand Down
6 changes: 3 additions & 3 deletions PlayWithYourSmile.playground/Sources/CognitiveServices.swift
Expand Up @@ -157,9 +157,9 @@ enum CognitiveServicesKeys {
/// Caseless enum of various configuration parameters.
/// See https://dev.projectoxford.ai/docs/services/56f91f2d778daf23d8ec6739/operations/56f91f2e778daf14a499e1fa for details
enum CognitiveServicesConfiguration {
static let AnalyzeURL = "https://api.projectoxford.ai/vision/v1.0/analyze"
static let EmotionURL = "https://api.projectoxford.ai/emotion/v1.0/recognize"
static let FaceDetectURL = "https://api.projectoxford.ai/face/v1.0/detect"
static let AnalyzeURL = "https://westeurope.api.cognitive.microsoft.com/vision/v1.0/analyze"
static let EmotionURL = "https://westus.api.cognitive.microsoft.com/emotion/v1.0/recognize"
static let FaceDetectURL = "https://westeurope.api.cognitive.microsoft.com/face/v1.0/detect"
static let JPEGCompressionQuality = 0.9 as CGFloat
static let RequiredConfidence = 0.85
}
Expand Down
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Version</key>
<string>1.0</string>
<key>Name</key>
<string>Chapter 1</string>
<key>Pages</key>
<array>
<string>Page1.playgroundpage</string>
</array>
</dict>
</plist>
@@ -0,0 +1,9 @@
/*:
# It's a me, tha storyboard!
Playground Books can do a lot. You might know this already. But have you ever thought of using your already
existing storyboards within your playgrounds and books? If not, now is the time :)
* callout(Storyboards!):
Build your view controller as you are used to and then just use them within your gorgeous Swift Playground Book.
*/

@@ -0,0 +1,7 @@
import PlaygroundSupport
import UIKit

// Load the storyboard bundled with this page and create the view controller
// registered under the "view" storyboard identifier.
let mainStoryboard = UIStoryboard(name: "Main", bundle: .main)
let rootController = mainStoryboard.instantiateViewController(withIdentifier: "view")

// Show it as the page's live view.
PlaygroundPage.current.liveView = rootController
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Name</key>
<string>Page 1</string>
<key>LiveViewMode</key>
<string>VisibleByDefault</string>
<key>LiveViewEdgeToEdge</key>
<true/>
<key>PlaygroundLoggingMode</key>
<string>Normal</string>
</dict>
</plist>
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Hints</key>
<array/>
</dict>
</plist>
26 changes: 26 additions & 0 deletions storyboard in books/Travel.playgroundbook/Contents/Manifest.plist
@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SwiftVersion</key>
<string>3.0</string>
<key>ImageReference</key>
<string>Icon.png</string>
<key>Version</key>
<string>3.0</string>
<key>ContentVersion</key>
<string>1.0</string>
<key>Name</key>
<string>Empty</string>
<key>ContentIdentifier</key>
<string>com.ms.demo.TravelLog</string>
<key>DeploymentTarget</key>
<string>ios10.0</string>
<key>DevelopmentRegion</key>
<string>en</string>
<key>Chapters</key>
<array>
<string>Chapter1.playgroundchapter</string>
</array>
</dict>
</plist>
Binary file not shown.
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Empty file.
Empty file.
@@ -0,0 +1,32 @@
//
// LogController.swift
// TravelLog
//
// Created by Manu Rink on 30.08.17.
// Copyright © 2017 microsoft. All rights reserved.
//

import Foundation
import UIKit

@objc(LogController)
class LogController : UIViewController {

    // Label that displays the greeting; connected in the storyboard.
    @IBOutlet weak var greetingsLabel: UILabel!
    // Optional name injected by the presenting controller before this view loads.
    var greeting : String?

    /// Fills the greeting label once the view hierarchy is loaded,
    /// falling back to a generic greeting when no name was supplied.
    override func viewDidLoad() {
        super.viewDidLoad()
        greetingsLabel.text = "Hola \(greeting ?? "you") :)"
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

}
@@ -0,0 +1,31 @@
//
// ViewController.swift
// TravelLog
//
// Created by Manu Rink on 30.08.17.
// Copyright © 2017 microsoft. All rights reserved.
//

import UIKit

@objc(ViewController)
public class ViewController: UIViewController {

    // Text field where the user enters their name; connected in the storyboard.
    @IBOutlet weak var usernameTextField: UITextField!

    override public func viewDidLoad() {
        super.viewDidLoad()
        // No additional setup needed beyond what the storyboard provides.
    }

    override public func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Hands the entered user name to the destination `LogController`
    /// before the segue is performed.
    override public func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        // Guarded cast instead of `as!`: a storyboard misconfiguration now
        // produces a clear diagnostic in debug builds instead of an
        // unexplained force-cast trap, and is a no-op in release builds
        // (LogController already tolerates a missing greeting).
        guard let logCtrl = segue.destination as? LogController else {
            assertionFailure("Unexpected segue destination: \(segue.destination)")
            return
        }
        logCtrl.greeting = usernameTextField.text
    }

}

0 comments on commit a13a709

Please sign in to comment.