//--Summary:
//  Create a program that can create a report of rune information from
//  lines of text.
//
//--Requirements:
//* Create a single function to iterate over each line of text that is
//  provided in main().
//  - The function must return nothing and must execute a closure
//* Using closures, determine the following information about the text and
//  print a report to the terminal:
//  - Number of letters
//  - Number of digits
//  - Number of spaces
//  - Number of punctuation marks
//
//--Notes:
//* The `unicode` stdlib package provides functionality for rune classification

package main
import (
"fmt""unicode"
)
// LineCallback is the signature for per-line handlers used by lineIterator.
type LineCallback func(line string)

// lineIterator walks the provided lines in order, handing each one to op.
// It returns nothing; all work happens through the supplied callback.
func lineIterator(lines []string, op LineCallback) {
	for idx := 0; idx < len(lines); idx++ {
		op(lines[idx])
	}
}
funcmain() {
lines:= []string{
"There are",
"68 letters,",
"five digits,",
"12 spaces,",
"and 4 punctuation marks in these lines of text!",
}
var (
letters=0digits=0spaces=0punctuation=0
)
analyseLine:=func(linestring) {
for_, r:=rangeline {
switch {
caseunicode.IsLetter(r):
letters++caseunicode.IsDigit(r):
digits++caseunicode.IsSpace(r):
spaces++caseunicode.IsPunct(r):
punctuation++
}
}
}
lineIterator(lines, analyseLine)
fmt.Printf("Counts:\nLetters: %d\nDigits: %d\nSpaces: %d\nPunctuation: %d\n", letters, digits, spaces, punctuation)
}
// ---- Goroutines exercise ----
//--Summary:
//  Create a program to read a list of numbers from multiple files,
//  sum the total of each file, then sum all the totals.
//
//--Requirements:
//* Sum the numbers in each file noted in the main() function
//* Add each sum together to get a grand total for all files
//  - Print the grand total to the terminal
//* Launch a goroutine for each file
//* Report any errors to the terminal
//
//--Notes:
//* This program will need to be run from the `lectures/exercise/goroutines`
//  directory:
//      cd lectures/exercise/goroutines
//      go run goroutines
//* The grand total for the files is 4103109
//* The data files intentionally contain invalid entries
//* stdlib packages that will come in handy:
//  - strconv: parse the numbers into integers
//  - bufio: read each line in a file
//  - os: open files
//  - io: io.EOF will indicate the end of a file
//  - time: pause the program to wait for the goroutines to finish

package main
import (
"bufio""fmt""os""strconv""time"
)
funcsumFile(scanner*bufio.Scanner) int {
sum:=0forscanner.Scan() {
num, err:=strconv.Atoi(scanner.Text())
iferr==nil {
sum+=num
}
}
returnsum
}
funcmain() {
files:= []string{"num1.txt", "num2.txt", "num3.txt", "num4.txt", "num5.txt"}
sum:=0for_, file:=rangefiles {
f, _:=os.Open(file)
deferf.Close()
scanner:=bufio.NewScanner(f)
gofunc() {
sum+=sumFile(scanner)
}()
}
time.Sleep(time.Millisecond*300)
fmt.Println("sum", sum)
}
// ---- Channels exercise ----
//--Summary:
//  Create a program that utilizes goroutines to run the provided calculation
//  function on a number of jobs. The results from the goroutines must be
//  communicated back to the main thread using a channel, and then added
//  together.
//
//--Requirements:
//* Run `longCalculation` for each job generated by the `makeJobs` function
//* Each job must be run in a separate goroutine
//* The result from `longCalculation` must be provided to the main function
//  using a channel
//* Sum the results from each job to generate a final result, and print it
//  to the terminal

package main
import (
"fmt""math/rand""time"
)
// Job identifies a unit of work; its integer value feeds the calculation.
type Job int

// longCalculation simulates expensive work by sleeping for a pseudo-random
// duration under one second, reports completion, and returns the job's
// value multiplied by 30.
func longCalculation(i Job) int {
	pause := time.Duration(rand.Intn(1000)) * time.Millisecond
	time.Sleep(pause)
	fmt.Printf("Job %d complete in %v\n", i, pause)
	return int(i) * 30
}

// makeJobs builds a batch of 100 jobs with pseudo-random values in [0, 10000).
func makeJobs() []Job {
	const jobCount = 100
	jobs := make([]Job, jobCount)
	for idx := range jobs {
		jobs[idx] = Job(rand.Intn(10000))
	}
	return jobs
}
// runJob executes a single job and delivers its result on resultChan.
// Intended to be launched as a goroutine; the send blocks until main receives.
func runJob(resultChan chan<- int, i Job) {
	resultChan <- longCalculation(i)
}
funcmain() {
rand.New(rand.NewSource(time.Now().UnixNano()))
jobs:=makeJobs()
result:=make(chanint)
for_, job:=rangejobs {
gorunJob(result, job)
}
resultCount:=0sum:=0forresultCount<len(jobs) {
sum+=<-resultresultCount++
}
fmt.Println("sum", sum)
fmt.Println("resultCount", resultCount)
}
// ---- Synchronization exercise ----
//--Summary:
//  Create a program that can read text from standard input and count the
//  number of letters present in the input.
//
//--Requirements:
//* Count the total number of letters in any chosen input
//* The input must be supplied from standard input
//* Input analysis must occur per-word, and each word must be analyzed
//  within a goroutine
//* When the program finishes, display the total number of letters counted
//
//--Notes:
//* Use CTRL+D (Mac/Linux) or CTRL+Z (Windows) to signal EOF, if manually
//  entering data
//* Use `cat FILE | go run ./exercise/sync` to analyze a file
//* Use any synchronization techniques to implement the program:
//  - Channels / mutexes / wait groups

package main
import (
"bufio""fmt""os""strings""sync""time""unicode"
)
typecounterstruct {
countint
sync.Mutex
}
funcsplitLine(linestring) []string {
returnstrings.Split(line, " ")
}
funccount(wg*sync.WaitGroup, counter*counter, wordstring) {
counter.Lock()
deferwg.Done()
defercounter.Unlock()
count:=0for_, r:=rangeword {
ifunicode.IsLetter(r) {
count++
}
}
counter.count+=countfmt.Printf("word: %s, lettercount: %d\n", word, count)
}
funcmain() {
start:=time.Now()
scanner:=bufio.NewScanner(os.Stdin)
varwg sync.WaitGroupvarcountercounterforscanner.Scan() {
for_, word:=rangesplitLine(scanner.Text()) {
wg.Add(1)
w:=wordgocount(&wg, &counter, w)
}
}
wg.Wait()
vartotalintcounter.Lock()
total=counter.countcounter.Unlock()
fmt.Println("total", total)
fmt.Println("duration", time.Since(start))
}
package main
import (
"bytes""encoding/base64""fmt""image"
_ "image/gif"
_ "image/jpeg"
_ "image/png""log""os""strings""github.com/chai2010/webp""github.com/google/uuid"
)
// makeWork returns a channel that a background goroutine feeds with each of
// the supplied Base64 images. The channel is closed once every image has
// been sent, so callers may simply range over it.
func makeWork(base64Images ...string) <-chan string {
	out := make(chan string)
	go func() {
		// Closing tells consumers that nothing else will be sent.
		defer close(out)
		for _, encodedImg := range base64Images {
			out <- encodedImg
		}
	}()
	return out
}
// pipeline spawns a goroutine that applies process to every value received
// on input, forwarding each result on the returned channel. The stage stops
// early if a value arrives on quit, and always closes its output channel on
// exit so downstream stages terminate cleanly.
func pipeline[I any, O any](quit <-chan struct{}, input <-chan I, process func(I) O) <-chan O {
	results := make(chan O)
	go func() {
		defer close(results)
		for item := range input {
			select {
			case results <- process(item):
			case <-quit:
				return
			}
		}
	}()
	return results
}
// base64ToRawImage decodes a Base64-encoded image payload into an
// image.Image. The concrete format is detected by whichever decoders are
// registered (gif/jpeg/png via the blank imports). A decode failure aborts
// the program.
func base64ToRawImage(base64Img string) image.Image {
	payload := strings.NewReader(base64Img)
	decoder := base64.NewDecoder(base64.StdEncoding, payload)
	// The middle return value names the detected format; unused here.
	img, _, err := image.Decode(decoder)
	if err != nil {
		log.Fatal(err)
	}
	return img
}
funcencodeToWebp(img image.Image) bytes.Buffer {
varbuf bytes.Bufferiferr:=webp.Encode(&buf, img, &webp.Options{Lossless: true}); err!=nil {
log.Fatal(err)
}
returnbuf
}
funcsaveToDisk(imgBuf bytes.Buffer) string {
filename:=fmt.Sprintf("%v.webp", uuid.New().String())
os.WriteFile(filename, imgBuf.Bytes(), 0644)
returnfilename
}
// main wires a three-stage image pipeline (decode -> WebP encode -> save)
// and demonstrates early termination via the quit channel.
func main() {
	// img1, img2, and img3 are Base64 payloads defined elsewhere in this package.
	base64Images := makeWork(img1, img2, img3)
	quit := make(chan struct{})
	var signal struct{}
	rawImages := pipeline(quit, base64Images, base64ToRawImage)
	webpImages := pipeline(quit, rawImages, encodeToWebp)
	// NOTE(review): this send stops one of the already-running pipeline
	// stages early (whichever selects the quit case first), so not every
	// image necessarily reaches the final stage — presumably intentional
	// for the quit demo; confirm against the lecture material.
	quit <- signal
	filenames := pipeline(quit, webpImages, saveToDisk)
	// Ranging ends when the final stage closes its output channel.
	for name := range filenames {
		fmt.Println(name)
	}
}
// ---- Pipeline fan-in demo ----
package main
// We are starting with the same code from the
// `pipeline` demo.

import (
"bytes""encoding/base64""fmt""image"
_ "image/gif"
_ "image/jpeg"
_ "image/png""log""os""strings""sync""github.com/chai2010/webp""github.com/google/uuid"
)
// makeWork exposes the supplied Base64 images as a receive-only channel.
// A background goroutine sends every image in order and then closes the
// channel, signalling consumers that no further work will arrive.
func makeWork(base64Images ...string) <-chan string {
	out := make(chan string)
	go func() {
		for i := 0; i < len(base64Images); i++ {
			out <- base64Images[i]
		}
		close(out)
	}()
	return out
}
// pipeline starts a goroutine that transforms each value from input with
// process and emits the result on the returned channel, closing it once the
// input channel is drained so downstream stages can range over it.
func pipeline[I any, O any](input <-chan I, process func(I) O) <-chan O {
	results := make(chan O)
	go func() {
		defer close(results)
		for item := range input {
			results <- process(item)
		}
	}()
	return results
}
// base64ToRawImage turns a Base64-encoded image payload into an image.Image,
// relying on the decoders registered by the blank gif/jpeg/png imports to
// detect the format. Any decode error aborts the program.
func base64ToRawImage(base64Img string) image.Image {
	reader := base64.NewDecoder(base64.StdEncoding, strings.NewReader(base64Img))
	// Second return value is the detected format name; not needed here.
	decoded, _, err := image.Decode(reader)
	if err != nil {
		log.Fatal(err)
	}
	return decoded
}
funcencodeToWebp(img image.Image) bytes.Buffer {
varbuf bytes.Bufferiferr:=webp.Encode(&buf, img, &webp.Options{Lossless: true}); err!=nil {
log.Fatal(err)
}
returnbuf
}
funcsaveToDisk(imgBuf bytes.Buffer) string {
filename:=fmt.Sprintf("%v.webp", uuid.New().String())
os.WriteFile(filename, imgBuf.Bytes(), 0644)
returnfilename
}
// fanIn merges any number of input channels into one output channel.
// One goroutine drains each input; a final goroutine waits for all of them
// and then closes the merged channel so consumers can range over it.
func fanIn[T any](channels ...<-chan T) <-chan T {
	merged := make(chan T)
	var wg sync.WaitGroup
	wg.Add(len(channels))

	for _, ch := range channels {
		go func(source <-chan T) {
			defer wg.Done()
			for item := range source {
				merged <- item
			}
		}(ch)
	}

	// Close only after every drainer has finished, otherwise a send on a
	// closed channel would panic.
	go func() {
		wg.Wait()
		close(merged)
	}()
	return merged
}
// main builds a three-stage image pipeline (decode -> WebP encode -> save)
// with three parallel workers per stage, merged between stages by fanIn.
func main() {
	// img1, img2, and img3 are Base64 payloads defined elsewhere in this package.
	base64Images := makeWork(img1, img2, img3)
	// stage 1: decode Base64 payloads to raw images, fanned out to 3 workers.
	rawImages1 := pipeline(base64Images, base64ToRawImage)
	rawImages2 := pipeline(base64Images, base64ToRawImage)
	rawImages3 := pipeline(base64Images, base64ToRawImage)
	rawImages := fanIn(rawImages1, rawImages2, rawImages3)
	// stage 2: encode raw images to WebP, again with 3 workers.
	webpImages1 := pipeline(rawImages, encodeToWebp)
	webpImages2 := pipeline(rawImages, encodeToWebp)
	webpImages3 := pipeline(rawImages, encodeToWebp)
	webpImages := fanIn(webpImages1, webpImages2, webpImages3)
	// stage 3: persist each encoded image and fan the filenames back in.
	filenames1 := pipeline(webpImages, saveToDisk)
	filenames2 := pipeline(webpImages, saveToDisk)
	filenames3 := pipeline(webpImages, saveToDisk)
	filenames := fanIn(filenames1, filenames2, filenames3)
	// Ranging ends once every save worker finishes and fanIn closes the channel.
	for name := range filenames {
		fmt.Println(name)
	}
}