diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index f22921b7049..00000000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,2 +0,0 @@ -ko_fi: coteschung -custom: https://sponsor.cotes.page diff --git a/.github/workflows/pages-deploy.yml b/.github/workflows/pages-deploy.yml new file mode 100644 index 00000000000..4f8346a64c3 --- /dev/null +++ b/.github/workflows/pages-deploy.yml @@ -0,0 +1,77 @@ +name: "Build and Deploy" +on: + push: + branches: + - master + - main + paths-ignore: + - .gitignore + - README.md + - LICENSE + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +# Allow one concurrent deployment +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + # submodules: true + # If using the 'assets' git submodule from Chirpy Starter, uncomment above + # (See: https://github.com/cotes2020/chirpy-starter/tree/main/assets) + + - name: Setup Pages + id: pages + uses: actions/configure-pages@v4 + + - name: Setup Ruby + uses: ruby/setup-ruby@v1 + with: +<<<<<<<< HEAD:.github/workflows/pages-deploy.yml + ruby-version: 2.7 # reads from a '.ruby-version' or '.tools-version' file if 'ruby-version' is omitted +======== + ruby-version: 3.3 +>>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041:.github/workflows/starter/pages-deploy.yml + bundler-cache: true + + - name: Build site + run: bundle exec jekyll b -d "_site${{ steps.pages.outputs.base_path }}" + env: + JEKYLL_ENV: "production" + + - name: Test site + run: | + bundle exec htmlproofer _site \ + \-\-disable-external \ + \-\-ignore-urls "/^http:\/\/127.0.0.1/,/^http:\/\/0.0.0.0/,/^http:\/\/localhost/" + + - name: Upload site artifact + uses: actions/upload-pages-artifact@v3 + with: + path: "_site${{ steps.pages.outputs.base_path }}" + + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/starter/pages-deploy.yml b/.github/workflows/starter/pages-deploy.yml index cc28f99fd25..4f8346a64c3 100644 --- a/.github/workflows/starter/pages-deploy.yml +++ b/.github/workflows/starter/pages-deploy.yml @@ -2,8 +2,8 @@ name: "Build and Deploy" on: push: branches: - - main - master + - main paths-ignore: - .gitignore - README.md @@ -42,7 +42,11 @@ jobs: - name: Setup Ruby uses: ruby/setup-ruby@v1 with: +<<<<<<<< HEAD:.github/workflows/pages-deploy.yml + ruby-version: 2.7 # reads from a '.ruby-version' or '.tools-version' file if 'ruby-version' is omitted +======== ruby-version: 3.3 +>>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041:.github/workflows/starter/pages-deploy.yml bundler-cache: true - name: Build site diff --git a/README.md b/README.md index 7e57b2a52e2..4cc2e4b937e 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,10 @@ +<<<<<<< HEAD +
+ 
+ ![header](https://capsule-render.vercel.app/api?type=venom&color=gradient&height=300&section=header&text=Germanus'%20GitHub)
+ 
+=======
@@ -16,10 +23,87 @@ [![Devices Mockup](https://chirpy-img.netlify.app/commons/devices-mockup.png)][demo] +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041
-## Features - +
+ 
+ 
+ ## 👀 About Me
+ #### :fire: Studying to become an AI / Backend / DevOps developer.
+ #### :mortar_board: Gyeongsang National University (GNU), Aerospace and Software Engineering
+ #### :black_nib: [A Comparison of Pretrained Models for Classifying Issue Reports, IEEE Access](https://ieeexplore.ieee.org/document/10546475)
+ #### :pencil2: SSAFY 12th cohort, Data track (2024.07 - ongoing)
+ 
+<<<<<<< HEAD
+ ### BOJ Rating
+ [![Solved.ac profile](https://mazassumnida.wtf/api/v2/generate_badge?boj=qja1998)](https://solved.ac/qja1998)
+ 
+
+ 
+ ## 🧱 Tech Stack
+ ### Language
+ 
+ 
+ 
+ + ### AI / Data + + + + + + + + +
+ + ### Infra + + + + + + ### Backend + + +
+ + + ### Tools + + + + + + + + +
+
+ 
+ ## 🤔 GitHub Stats
+ ![](https://github-profile-summary-cards.vercel.app/api/cards/profile-details?username=qja1998&theme=nord_dark)
+ 
+ ![](https://github-profile-summary-cards.vercel.app/api/cards/repos-per-language?username=qja1998&theme=nord_dark)
+ ![](https://github-profile-summary-cards.vercel.app/api/cards/most-commit-language?username=qja1998&theme=nord_dark)
+ 
+ ![](https://github-profile-summary-cards.vercel.app/api/cards/stats?username=qja1998&theme=nord_dark)
+ 
+ ## Contact
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+======= - Dark Theme - Localized UI language - Pinned Posts on Home Page @@ -82,3 +166,4 @@ This project is published under [MIT License][license]. [lib]: https://github.com/cotes2020/chirpy-static-assets [vscode]: https://code.visualstudio.com/ [jetbrains]: https://www.jetbrains.com/?from=jekyll-theme-chirpy +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 diff --git a/_config.yml b/_config.yml index d6f84c57815..421999070fb 100644 --- a/_config.yml +++ b/_config.yml @@ -8,25 +8,45 @@ theme: jekyll-theme-chirpy # otherwise, the layout language will use the default value of 'en'. lang: en +<<<<<<< HEAD + +# Change to your timezone โ€บ http://www.timezoneconverter.com/cgi-bin/findzone/findzone +timezone: Asia/Seoul +======= # Change to your timezone โ€บ https://kevinnovak.github.io/Time-Zone-Picker timezone: Asia/Shanghai +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 # jekyll-seo-tag settings โ€บ https://github.com/jekyll/jekyll-seo-tag/blob/master/docs/usage.md # โ†“ -------------------------- +<<<<<<< HEAD +title: Kwon # the main title + +tagline: Studying # it will display as the sub-title +======= title: Chirpy # the main title tagline: A text-focused Jekyll theme # it will display as the subtitle +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 description: >- # used by seo meta and the atom feed A minimal, responsive and feature-rich Jekyll theme for technical writing. +<<<<<<< HEAD +# fill in the protocol & hostname for your site, e.g., 'https://username.github.io' +url: 'https://qja1998.github.io' + +github: + username: qja1998 # change to your github username +======= # Fill in the protocol & hostname for your site. # E.g. 'https://username.github.io', note that it does not end with a '/'. url: "" github: username: github_username # change to your GitHub username +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 twitter: username: twitter_username # change to your Twitter username @@ -34,12 +54,21 @@ twitter: social: # Change to your full name. # It will be displayed as the default author of the posts and the copyright owner in the Footer +<<<<<<< HEAD + name: GiBeom Kwon + email: qja1998@naver.com # change to your email address + links: + # The first element serves as the copyright owner's link + #- https://twitter.com/username # change to your twitter homepage + - https://github.com/qja1998 # change to your github homepage +======= name: your_full_name email: example@domain.com # change to your email address links: # The first element serves as the copyright owner's link - https://twitter.com/username # change to your Twitter homepage - https://github.com/username # change to your GitHub homepage +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 # Uncomment below to add more social links # - https://www.facebook.com/username # - https://www.linkedin.com/in/username @@ -88,17 +117,28 @@ pageviews: # light โ€” Use the light color scheme # dark โ€” Use the dark color scheme # +<<<<<<< HEAD +theme_mode: light # [light|dark] +======= theme_mode: # [light | dark] +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 # The CDN endpoint for media resources. # Notice that once it is assigned, the CDN url # will be added to all media resources (site avatar, posts' images, audio and video files) paths starting with '/' # # e.g. 
'https://cdn.com' +<<<<<<< HEAD +img_cdn: '/assets/img' + +# the avatar on sidebar, support local or CORS resources +avatar: '/avatar/avatar.jpg' +======= cdn: "https://chirpy-img.netlify.app" # the avatar on sidebar, support local or CORS resources avatar: "/commons/avatar.jpg" +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 # The URL of the site-wide social preview image used in SEO `og:image` meta tag. # It can be overridden by a customized `page.image` in front matter. @@ -108,18 +148,31 @@ social_preview_image: # string, local or CORS resources toc: true comments: +<<<<<<< HEAD + active: utterances # The global switch for posts comments, e.g., 'disqus'. Keep it empty means disable + # The active options are as follows: +======= # Global switch for the post-comment system. Keeping it empty means disabled. provider: # [disqus | utterances | giscus] # The provider options are as follows: +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 disqus: shortname: # fill with the Disqus shortname. โ€บ https://help.disqus.com/en/articles/1717111-what-s-a-shortname # utterances settings โ€บ https://utteranc.es/ utterances: +<<<<<<< HEAD + repo: qja1998/qja1998.github.io # / + issue_term: # < url | pathname | title | ...> + # Giscus options โ€บ https://giscus.app + giscus: + repo: # / +======= repo: # / issue_term: # < url | pathname | title | ...> # Giscus options โ€บ https://giscus.app giscus: repo: # / +>>>>>>> 54d4d59d22ac543a14bfbd9bb3d6fb6756056041 repo_id: category: category_id: diff --git a/_posts/2022-02-04-boj_10610.markdown b/_posts/2022-02-04-boj_10610.markdown new file mode 100644 index 00000000000..6ab105150b8 --- /dev/null +++ b/_posts/2022-02-04-boj_10610.markdown @@ -0,0 +1,85 @@ +--- +title: "[BOJ] 30 - 10610 (S5)" +author: kwon +date: 2022-02-04T23:00:00 +0900 +categories: [boj, silver] +tags: [math, string, greedy algorithm, sort, number theory] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 1 ์ดˆ | 256 MB | + +# ๋ฌธ์ œ + +์–ด๋А ๋‚ , ๋ฏธ๋ฅด์ฝ”๋Š” ์šฐ์—ฐํžˆ ๊ธธ๊ฑฐ๋ฆฌ์—์„œ ์–‘์ˆ˜ N์„ ๋ณด์•˜๋‹ค. ๋ฏธ๋ฅด์ฝ”๋Š” 30์ด๋ž€ ์ˆ˜๋ฅผ ์กด๊ฒฝํ•˜๊ธฐ ๋•Œ๋ฌธ์—, ๊ทธ๋Š” ๊ธธ๊ฑฐ๋ฆฌ์—์„œ ์ฐพ์€ ์ˆ˜์— ํฌํ•จ๋œ ์ˆซ์ž๋“ค์„ ์„ž์–ด 30์˜ ๋ฐฐ์ˆ˜๊ฐ€ ๋˜๋Š” ๊ฐ€์žฅ ํฐ ์ˆ˜๋ฅผ ๋งŒ๋“ค๊ณ  ์‹ถ์–ดํ•œ๋‹ค. + +๋ฏธ๋ฅด์ฝ”๋ฅผ ๋„์™€ ๊ทธ๊ฐ€ ๋งŒ๋“ค๊ณ  ์‹ถ์–ดํ•˜๋Š” ์ˆ˜๋ฅผ ๊ณ„์‚ฐํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜๋ผ. + +# ์ž…๋ ฅ + +N์„ ์ž…๋ ฅ๋ฐ›๋Š”๋‹ค. N๋Š” ์ตœ๋Œ€ 105๊ฐœ์˜ ์ˆซ์ž๋กœ ๊ตฌ์„ฑ๋˜์–ด ์žˆ์œผ๋ฉฐ, 0์œผ๋กœ ์‹œ์ž‘ํ•˜์ง€ ์•Š๋Š”๋‹ค. + +# ์ถœ๋ ฅ + +๋ฏธ๋ฅด์ฝ”๊ฐ€ ๋งŒ๋“ค๊ณ  ์‹ถ์–ดํ•˜๋Š” ์ˆ˜๊ฐ€ ์กด์žฌํ•œ๋‹ค๋ฉด ๊ทธ ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•˜๋ผ. ๊ทธ ์ˆ˜๊ฐ€ ์กด์žฌํ•˜์ง€ ์•Š๋Š”๋‹ค๋ฉด, -1์„ ์ถœ๋ ฅํ•˜๋ผ. + +# ํ’€์ด + +## ์ฒซ ๋ฒˆ์งธ + +์ž…๋ ฅ ๋ฐ›์€ ์ˆ˜์— 0์ด ์—†๋‹ค๋ฉด 30์˜ ๋ฐฐ์ˆ˜๋ฅผ ์ ˆ๋Œ€ ๋งŒ๋“ค ์ˆ˜ ์—†์œผ๋ฏ€๋กœ ๋จผ์ € ์ œ์™ธํ•œ๋‹ค. 30์˜ ๋ฐฐ์ˆ˜์ธ์ง€ ํŒ๋ณ„ํ•˜๊ธฐ ์œ„ํ•ด 0์„ ํ•˜๋‚˜ ์‚ญ์ œํ•œ ํ›„์— ๊ทธ ์ˆ˜๊ฐ€ 3์˜ ๋ฐฐ์ˆ˜์ธ์ง€ ํ™•์ธํ•ด ๋ณธ๋‹ค. + +3์œผ๋กœ ๋‚˜๋ˆ„์–ด ๋–จ์–ด์ง€๋ฉด 3์˜ ๋ฐฐ์ˆ˜์ด๋ฏ€๋กœ(๋ชจ๋“  ์ž๋ฆฌ์˜ ์ˆ˜์˜ ํ•ฉ์ด 3์˜ ๋ฐฐ์ˆ˜์ด๋ฉด 3์˜ ๋ฐฐ์ˆ˜์ด๋ฏ€๋กœ ์ˆ˜์˜ ๋ฐฐ์—ด์ด ์–ด๋–ป๊ฒŒ ๋˜๋“  3์˜ ๋ฐฐ์ˆ˜์ธ์ง€ ์•„๋‹Œ์ง€ ์•Œ ์ˆ˜ ์žˆ๋‹ค) ๋‚ด๋ฆผ์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌํ•˜์—ฌ ์ •์ˆ˜๋กœ ๋งŒ๋“ ๋‹ค. ์ฒ˜์Œ์— 0์„ ํ•˜๋‚˜ ์‚ญ์ œํ–ˆ์œผ๋ฏ€๋กœ ๋‹ค์‹œ 10์„ ๊ณฑํ•ด์ค˜์•ผ ํ•œ๋‹ค. 
+ +### ์ฝ”๋“œ + +```python +nums = list(str(sys.stdin.readline().strip())) +if '0' not in nums: print(-1) +else: + nums.remove('0') + if int(''.join(nums)) % 3 == 0: print(int(''.join(sorted(nums, reverse=True))) * 10) + else: print(-1) +``` + +## ๋‘ ๋ฒˆ์งธ + +์ž…๋ ฅ ๋ฐ›์€ ์ˆ˜์— 0์ด ์—†์„ ๊ฒฝ์šฐ ๊ฑฐ๋ฅด๋Š” ๊ฒƒ์€ ๊ฐ™๋‹ค. ๋‹ค๋ฅธ ์ ์€ ๋‚ด๋ฆผ์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌํ•œ ๋’ค์— ๊ทธ ์ˆ˜๊ฐ€ 30์˜ ๋ฐฐ์ˆ˜์ธ์ง€ ํ™•์ธํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +### ์ฝ”๋“œ + +```python +nums = list(str(sys.stdin.readline().strip())) +if '0' not in nums: print(-1) +else: + num = int(''.join(sorted(nums, reverse=True))) + if num % 30 == 0: print(num) + else: print(-1) +``` + +## ์„ธ ๋ฒˆ์งธ + +๊ฐ ์ž๋ฆฌ์˜ ๋ฒ”์œ„๋Š” 0~9 ์‚ฌ์ด์˜ ์ •์ˆ˜์ด๋ฏ€๋กœ counting sort๋ฅผ ์ด์šฉํ•˜์—ฌ ํ’€ ์ˆ˜ ์žˆ๋‹ค. ๊ฐ ์ž๋ฆฌ ์ˆ˜๋“ค์„ index๋กœ ํ•˜์—ฌ ๊ฐœ์ˆ˜๋ฅผ ์„ธ์–ด ์ค€ ๋‹ค์Œ, 0์„ ์ œ์™ธํ•œ ์ˆ˜๋“ค์„ ์—ญ์ˆœ์œผ๋กœ ๊ฐœ์ˆ˜์— ๋งž๊ฒŒ ๋ถ™์ด๋ฉด์„œ ๋งŒ๋“ค๋ฉด ๋‚ด๋ฆผ์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌ๋œ ์ˆ˜๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +์ด๋ฅผ 3์˜ ๋ฐฐ์ˆ˜์ธ์ง€ ํ™•์ธํ•˜๊ณ  0์˜ ๊ฐœ์ˆ˜ ๋งŒํผ์˜ 10์˜ ๊ฑฐ๋“ญ์ œ๊ณฑ์„ ๊ณฑํ•ด์ค€๋‹ค. + +```python +import sys +nums = str(sys.stdin.readline().strip()) +if '0' not in nums: + print(-1) +else: + l = [0] * (int(max(nums)) + 1) + s = '' + sum = 0 + for i in nums: + l[int(i)] += 1 + for i in range(len(l)-1, 0, -1): + s += str(i) * l[i] + sum += i * l[i] + if sum % 3 == 0: print(int(s) * (10 ** l[0])) + else: print(-1) +``` \ No newline at end of file diff --git a/_posts/2022-03-11-boj_1011.markdown b/_posts/2022-03-11-boj_1011.markdown new file mode 100644 index 00000000000..9f9778200b6 --- /dev/null +++ b/_posts/2022-03-11-boj_1011.markdown @@ -0,0 +1,67 @@ +--- +title: "[BOJ] Fly me to the Alpha Centauri - 1011 (G5)" +author: kwon +date: 2022-03-11T23:00:00 +0900 +categories: [boj, gold] +tags: [math] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 512 MB | + +## ๋ฌธ์ œ + +์šฐํ˜„์ด๋Š” ์–ด๋ฆฐ ์‹œ์ ˆ, ์ง€๊ตฌ ์™ธ์˜ ๋‹ค๋ฅธ ํ–‰์„ฑ์—์„œ๋„ ์ธ๋ฅ˜๋“ค์ด ์‚ด์•„๊ฐˆ ์ˆ˜ ์žˆ๋Š” ๋ฏธ๋ž˜๊ฐ€ ์˜ค๋ฆฌ๋ผ ๋ฏฟ์—ˆ๋‹ค. ๊ทธ๋ฆฌ๊ณ  ๊ทธ๊ฐ€ ์ง€๊ตฌ๋ผ๋Š” ์„ธ์ƒ์— ๋ฐœ์„ ๋‚ด๋ ค ๋†“์€ ์ง€ 23๋…„์ด ์ง€๋‚œ ์ง€๊ธˆ, ์„ธ๊ณ„ ์ตœ์—ฐ์†Œ ASNA ์šฐ์ฃผ ๋น„ํ–‰์‚ฌ๊ฐ€ ๋˜์–ด ์ƒˆ๋กœ์šด ์„ธ๊ณ„์— ๋ฐœ์„ ๋‚ด๋ ค ๋†“๋Š” ์˜๊ด‘์˜ ์ˆœ๊ฐ„์„ ๊ธฐ๋‹ค๋ฆฌ๊ณ  ์žˆ๋‹ค. + +๊ทธ๊ฐ€ ํƒ‘์Šนํ•˜๊ฒŒ ๋  ์šฐ์ฃผ์„ ์€ Alpha Centauri๋ผ๋Š” ์ƒˆ๋กœ์šด ์ธ๋ฅ˜์˜ ๋ณด๊ธˆ์ž๋ฆฌ๋ฅผ ๊ฐœ์ฒ™ํ•˜๊ธฐ ์œ„ํ•œ ๋Œ€๊ทœ๋ชจ ์ƒํ™œ ์œ ์ง€ ์‹œ์Šคํ…œ์„ ํƒ‘์žฌํ•˜๊ณ  ์žˆ๊ธฐ ๋•Œ๋ฌธ์—, ๊ทธ ํฌ๊ธฐ์™€ ์งˆ๋Ÿ‰์ด ์—„์ฒญ๋‚œ ์ด์œ ๋กœ ์ตœ์‹ ๊ธฐ์ˆ ๋ ฅ์„ ์ด ๋™์›ํ•˜์—ฌ ๊ฐœ๋ฐœํ•œ ๊ณต๊ฐ„์ด๋™ ์žฅ์น˜๋ฅผ ํƒ‘์žฌํ•˜์˜€๋‹ค. ํ•˜์ง€๋งŒ ์ด ๊ณต๊ฐ„์ด๋™ ์žฅ์น˜๋Š” ์ด๋™ ๊ฑฐ๋ฆฌ๋ฅผ ๊ธ‰๊ฒฉํ•˜๊ฒŒ ๋Š˜๋ฆด ๊ฒฝ์šฐ ๊ธฐ๊ณ„์— ์‹ฌ๊ฐํ•œ ๊ฒฐํ•จ์ด ๋ฐœ์ƒํ•˜๋Š” ๋‹จ์ ์ด ์žˆ์–ด์„œ, ์ด์ „ ์ž‘๋™์‹œ๊ธฐ์— k๊ด‘๋…„์„ ์ด๋™ํ•˜์˜€์„ ๋•Œ๋Š” k-1 , k ํ˜น์€ k+1 ๊ด‘๋…„๋งŒ์„ ๋‹ค์‹œ ์ด๋™ํ•  ์ˆ˜ ์žˆ๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด, ์ด ์žฅ์น˜๋ฅผ ์ฒ˜์Œ ์ž‘๋™์‹œํ‚ฌ ๊ฒฝ์šฐ -1 , 0 , 1 ๊ด‘๋…„์„ ์ด๋ก ์ƒ ์ด๋™ํ•  ์ˆ˜ ์žˆ์œผ๋‚˜ ์‚ฌ์‹ค์ƒ ์Œ์ˆ˜ ํ˜น์€ 0 ๊ฑฐ๋ฆฌ๋งŒํผ์˜ ์ด๋™์€ ์˜๋ฏธ๊ฐ€ ์—†์œผ๋ฏ€๋กœ 1 ๊ด‘๋…„์„ ์ด๋™ํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ, ๊ทธ ๋‹ค์Œ์—๋Š” 0 , 1 , 2 ๊ด‘๋…„์„ ์ด๋™ํ•  ์ˆ˜ ์žˆ๋Š” ๊ฒƒ์ด๋‹ค. ( ์—ฌ๊ธฐ์„œ ๋‹ค์‹œ 2๊ด‘๋…„์„ ์ด๋™ํ•œ๋‹ค๋ฉด ๋‹ค์Œ ์‹œ๊ธฐ์—” 1, 2, 3 ๊ด‘๋…„์„ ์ด๋™ํ•  ์ˆ˜ ์žˆ๋‹ค. ) + +!https://www.acmicpc.net/upload/201003/rlaehdgur.JPG + +๊น€์šฐํ˜„์€ ๊ณต๊ฐ„์ด๋™ ์žฅ์น˜ ์ž‘๋™์‹œ์˜ ์—๋„ˆ์ง€ ์†Œ๋ชจ๊ฐ€ ํฌ๋‹ค๋Š” ์ ์„ ์ž˜ ์•Œ๊ณ  ์žˆ๊ธฐ ๋•Œ๋ฌธ์— x์ง€์ ์—์„œ y์ง€์ ์„ ํ–ฅํ•ด ์ตœ์†Œํ•œ์˜ ์ž‘๋™ ํšŸ์ˆ˜๋กœ ์ด๋™ํ•˜๋ ค ํ•œ๋‹ค. 
ํ•˜์ง€๋งŒ y์ง€์ ์— ๋„์ฐฉํ•ด์„œ๋„ ๊ณต๊ฐ„ ์ด๋™์žฅ์น˜์˜ ์•ˆ์ „์„ฑ์„ ์œ„ํ•˜์—ฌ y์ง€์ ์— ๋„์ฐฉํ•˜๊ธฐ ๋ฐ”๋กœ ์ง์ „์˜ ์ด๋™๊ฑฐ๋ฆฌ๋Š” ๋ฐ˜๋“œ์‹œ 1๊ด‘๋…„์œผ๋กœ ํ•˜๋ ค ํ•œ๋‹ค. + +๊น€์šฐํ˜„์„ ์œ„ํ•ด x์ง€์ ๋ถ€ํ„ฐ ์ •ํ™•ํžˆ y์ง€์ ์œผ๋กœ ์ด๋™ํ•˜๋Š”๋ฐ ํ•„์š”ํ•œ ๊ณต๊ฐ„ ์ด๋™ ์žฅ์น˜ ์ž‘๋™ ํšŸ์ˆ˜์˜ ์ตœ์†Ÿ๊ฐ’์„ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜๋ผ. + +## ์ž…๋ ฅ + +์ž…๋ ฅ์˜ ์ฒซ ์ค„์—๋Š” ํ…Œ์ŠคํŠธ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๊ฐ๊ฐ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด ํ˜„์žฌ ์œ„์น˜ x ์™€ ๋ชฉํ‘œ ์œ„์น˜ y ๊ฐ€ ์ •์ˆ˜๋กœ ์ฃผ์–ด์ง€๋ฉฐ, x๋Š” ํ•ญ์ƒ y๋ณด๋‹ค ์ž‘์€ ๊ฐ’์„ ๊ฐ–๋Š”๋‹ค. $(0 โ‰ค x < y < 2^{31})$ + +## ์ถœ๋ ฅ + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด x์ง€์ ์œผ๋กœ๋ถ€ํ„ฐ y์ง€์ ๊นŒ์ง€ ์ •ํ™•ํžˆ ๋„๋‹ฌํ•˜๋Š”๋ฐ ํ•„์š”ํ•œ ์ตœ์†Œํ•œ์˜ ๊ณต๊ฐ„์ด๋™ ์žฅ์น˜ ์ž‘๋™ ํšŸ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +## ํ’€์ด + +์ตœ์†Œ ํšŸ์ˆ˜๋กœ ๋ชฉํ‘œ์— ๋„๋‹ฌํ•˜๋ ค๋ฉด ํ•ญ์ƒ ์ค‘๊ฐ„ ๋ถ€๋ถ„์—์„œ ์ตœ๋Œ€ ๊ฑฐ๋ฆฌ๋กœ ๊ณต๊ฐ„์ด๋™์„ ํ•ด์•ผ ํ•œ๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ์ค‘๊ฐ„ ์ง€์ ๊นŒ์ง€๋Š” 1์”ฉ ๋Š˜๋ ค๊ฐ€๋ฉด์„œ ์ด๋™ํ•˜๋‹ค๊ฐ€ ์ค‘๊ฐ„์„ ์ง€๋‚˜๋Š” ์ˆœ๊ฐ„๋ถ€ํ„ฐ 1์”ฉ ์ค„์ด๋ฉด์„œ ์ด๋™ํ•˜๋ฉด ๋œ๋‹ค. ํ•˜์ง€๋งŒ x์™€ y ์‚ฌ์ด์˜ ๊ฑฐ๋ฆฌ๊ฐ€ ์ตœ๋Œ€ $2^{31}$๊นŒ์ง€ ๋Š˜์–ด๋‚  ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ ์ด๋™์„ ๋ชจ๋‘ ํ•ด๋ณด๋Š” ๊ฒƒ์€ ๋ถ€์ ํ•ฉํ•˜๋‹ค. + +๊ทธ๋ž˜์„œ ๋“ฑ์ฐจ์ˆ˜์—ด์˜ ํ•ฉ์„ ์ด์šฉํ•œ๋‹ค. ์ค‘๊ฐ„๊นŒ์ง€ ์ด๋™ํ•˜๋Š” ํšŸ์ˆ˜๋ฅผ $n$์ด๋ผ๊ณ  ํ–ˆ์„ ๋•Œ ์ด ์ด๋™ ๊ฑฐ๋ฆฌ๋Š” ์‹œ์ž‘ํ•ญ์ด 1, ๋งˆ์ง€๋ง‰ํ•ญ์ด $n+1$์ด๊ณ  ๊ณต์ฐจ๊ฐ€ 1์ธ ๋“ฑ์ฐจ์ˆ˜์—ด์˜ ํ•ฉ์œผ๋กœ ์ƒ๊ฐํ•  ์ˆ˜ ์žˆ๋‹ค. ๊ทธ๋ฆฌ๊ณ  ์ด ๋•Œ ํ•ฉ์€ $n(n+1)/2$๋กœ ๊ณ„์‚ฐํ•˜๋ฉด ๋œ๋‹ค. ์ด๊ฒƒ์„ ์—ผ๋‘์— ๋‘๊ณ  ๊ฐ€๋Šฅํ•œ ์ƒํ™ฉ์„ ์ƒ๊ฐํ•ด๋ณด๋ฉด ๋ฌธ์ œ๋ฅผ ํ’€ ์ˆ˜ ์žˆ๋‹ค. + +์—ฌ๋Ÿฌ ์ƒํ™ฉ์„ ๊ฐ€์ •ํ•ด๋ณด๊ธฐ ์œ„ํ•ด $n$์„ ๊ฑฐ๋ฆฌ์˜ ์ œ๊ณฑ๊ทผ์— ๋ฒ„๋ฆผ์„ ์ทจํ•œ ์ •์ˆ˜๋ผ๊ณ  ํ•˜๊ณ  ์ƒ๊ฐํ•ด๋ณด์ž. + +๋จผ์ € ์ค‘๊ฐ„์„ ๊ธฐ์ค€์œผ๋กœ ์–‘ ์ชฝ์˜ ํšŸ์ˆ˜๊ฐ€ ๊ฐ™์€ ๊ฒฝ์šฐ์—๋Š” ์–‘์ชฝ ๋ชจ๋‘๊ฐ€ $n(n+1)/2$๋กœ ๊ณ„์‚ฐ์ด ๋˜์–ด์•ผ ํ•˜๋ฉฐ ์ด ๋•Œ ์ด ํšŸ์ˆ˜๋Š” $2n$์ด๊ณ , ๊ฑฐ๋ฆฌ๊ฐ€ ์ •ํ™•ํžˆ $n(n+1)$์ผ ๋•Œ ๊ฐ€๋Šฅํ•˜๋‹ค. + +๋‹ค์Œ์€ ํ•œ ์ชฝ์˜ ํšŸ์ˆ˜๊ฐ€ ํ•˜๋‚˜ ๋งŽ์€ ๊ฒฝ์šฐ์ด๋‹ค. ์ด ๊ฒฝ์šฐ ํ•œ ์ชฝ์€ $n(n+1)/2$์ด๊ณ  ๋ฐ˜๋Œ€์ชฝ์€ $n(n+2)/2$๋กœ ๊ณ„์‚ฐ์ด ๋˜์–ด์•ผ ํ•œ๋‹ค. ์ด ๋•Œ ์ด ํšŸ์ˆ˜๋Š” $2n+1$์ด๊ณ , ๊ฑฐ๋ฆฌ๊ฐ€ $n(n+1)$๋ณด๋‹ค ํด ๋•Œ ๊ฐ€๋Šฅํ•˜๋‹ค. + +๋งˆ์ง€๋ง‰์œผ๋กœ๋Š” ํ•œ ์ชฝ์˜ ํšŸ์ˆ˜๊ฐ€ ํ•˜๋‚˜ ์ ์€ ๊ฒฝ์šฐ์ด๋‹ค. ์ด ๊ฒฝ์šฐ $n(n+1)/2$์ด๊ณ  ๋ฐ˜๋Œ€์ชฝ์€ $n(n-1)/2$๋กœ ๊ณ„์‚ฐ์ด ๋˜์–ด์•ผ ํ•œ๋‹ค. ์ด ๋•Œ ์ด ํšŸ์ˆ˜๋Š” $2n-1$์ด๊ณ , ๊ฑฐ๋ฆฌ๊ฐ€ $n^2$์ผ ๋•Œ ๊ฐ€๋Šฅํ•˜๋‹ค. + +๋งŒ์•ฝ ์œ„ ๊ฒฝ์šฐ์— ํฌํ•จ๋˜์ง€ ์•Š์œผ๋ฉด์„œ ๊ฑฐ๋ฆฌ๊ฐ€ $n(n+1)$๋ณด๋‹ค ํฌ๋‹ค๋ฉด $n$์˜ ํฌ๊ธฐ ์ž์ฒด๊ฐ€ ๋ถ€์กฑํ•œ ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์— ๊ณ ๋ คํ•˜์ง€ ์•Š๋Š”๋‹ค. 
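As a quick sanity check of these cases, here is a concrete example of my own (not from the original problem): for $x = 0$, $y = 7$ the distance is $N = 7$ and $n = \lfloor\sqrt{7}\rfloor = 2$. $N$ is neither $n^2 = 4$ nor $n(n+1) = 6$, but it is larger than $n(n+1)$, so the answer is $2n + 1 = 5$, which matches the valid move sequence $1, 2, 1, 2, 1$.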
+ +### ์ฝ”๋“œ +```python +import sys, math + +i = int(sys.stdin.readline()) +for _ in range(i): + n, m = map(int, sys.stdin.readline().split()) + N = m - n + r_N = math.sqrt(N) + N_int = math.trunc(r_N) + if r_N == N_int: print(N_int * 2 - 1) + else: + if N > N_int * (N_int + 1): + print(N_int * 2 + 1) + else: + print(N_int * 2) +``` \ No newline at end of file diff --git a/_posts/2022-04-14-dlZeroToAll-PyTorch-1.markdown b/_posts/2022-04-14-dlZeroToAll-PyTorch-1.markdown new file mode 100644 index 00000000000..b629df9d865 --- /dev/null +++ b/_posts/2022-04-14-dlZeroToAll-PyTorch-1.markdown @@ -0,0 +1,627 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab1: Tensor Manipulation" +author: Kwon +date: 2022-04-14T16:50:00+0900 +categories: [pytorch, study] +tags: [basic, tensor-manipulation] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab 1: Tensor Manipulation ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** +## 1. Vector, Matrix and Tensor +![](/posting_imgs/lab1-2.jpg) +

+Vector, Matrix, Tensor์˜ ํ‘œํ˜„์€ ์œ„ ๊ทธ๋ฆผ๊ณผ ๊ฐ™์ด ๋‚˜ํƒ€๋‚ผ ์ˆ˜ ์žˆ๋‹ค. + +๊ฐ ๊ตฌ์กฐ๋“ค์˜ ํฌ๊ธฐ๋Š” ์ฐจ์›๋“ค์˜ ํฌ๊ธฐ์˜ ๊ณฑ์œผ๋กœ ๊ตฌํ•  ์ˆ˜ ์žˆ๋Š”๋ฐ +

+![](/posting_imgs/lab1-2.jpg) +

+Metrix์™€ Tonsor์˜ ํฌ๊ธฐ(t)๋Š” ์œ„์™€ ๊ฐ™์ด ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +๋˜ํ•œ ์œ„ ๊ทธ๋ฆผ์˜ ์ฐจ์›์˜ ์ˆœ์„œ๋Š” ํŒŒ์ดํ† ์น˜์—์„œ ์ฐจ์›์„ ํ‘œํ˜„ํ•  ๋•Œ์˜ ์ˆœ์„œ์™€ ๊ฐ™๋‹ค. +์ฆ‰, ์•„๋ž˜์™€ ๊ฐ™์€ ์ˆœ์„œ๋Œ€๋กœ ํ‘œํ˜„ํ•˜๋ฉด ๋œ๋‹ค. +

+![](/posting_imgs/lab1-3.jpg) +

+ํฌ๊ธฐ๋ฅผ ๊ตฌํ•  ๋•Œ ๋ดค๋˜ ์ˆœ์„œ๋ฅผ ๋‹ค์‹œ ํ™•์ธํ•ด ๋ณด๋ฉด Vector๋Š” (batch size, dim)์ˆœ, Tensor๋Š” (batch size, length, dim)์ˆœ์œผ๋กœ ์œ„ ๊ทธ๋ฆผ๊ณผ ๊ฐ™์€ ์ˆœ์„œ๋กœ ํ‘œํ˜„ํ•œ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +
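To make this convention concrete, here is a small sketch of my own (not part of the original lecture); the batch size, length and dim values below are arbitrary:

{% highlight python %}
import torch

# 2D tensor: (batch size, dim)
t2d = torch.zeros(64, 256)
print(t2d.dim(), t2d.shape)   # 2 torch.Size([64, 256])

# 3D tensor: (batch size, length, dim)
t3d = torch.zeros(64, 20, 256)
print(t3d.dim(), t3d.shape)   # 3 torch.Size([64, 20, 256])

# |t| is the product of the dimension sizes
print(t2d.numel())            # 16384  (= 64 * 256)
print(t3d.numel())            # 327680 (= 64 * 20 * 256)
{% endhighlight %}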

+ +*** +## Import +{% highlight python %} +import numpy as np +import torch +{% endhighlight %} + +## 2. Array with PyTorch +#### 1D Array +{% highlight python %} +t = torch.FloatTensor([0., 1., 2., 3., 4., 5., 6.]) +print(t) + +''' output +tensor([0., 1., 2., 3., 4., 5., 6.]) +''' +{% endhighlight %} + +1์ฐจ์› ๋ฐฐ์—ด์˜ ๊ฒฝ์šฐ ์œ„์™€ ๊ฐ™์ด FloatTensor๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋ฐฐ์—ด์„ PyTorch์˜ 1์ฐจ์› ์‹ค์ˆ˜ํ˜• tensor๋กœ ๋ณ€ํ™˜ํ•  ์ˆ˜ ์žˆ๋‹ค. +

+#### 2D Array +{% highlight python %} +t = torch.FloatTensor([[1., 2., 3.], + [4., 5., 6.], + [7., 8., 9.], + [10., 11., 12.] + ]) +print(t) + +''' output +tensor([[ 1., 2., 3.], + [ 4., 5., 6.], + [ 7., 8., 9.], + [10., 11., 12.]]) +'''' +{% endhighlight %} + +2์ฐจ์› ๋ฐฐ์—ด์˜ ๊ฒฝ์šฐ์—๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ FloatTensor๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋ฐฐ์—ด์„ PyTorch์˜ 2์ฐจ์› ์‹ค์ˆ˜ํ˜• tensor๋กœ ๋ณ€ํ™˜ํ•  ์ˆ˜ ์žˆ๋‹ค. + +์œ„ ์˜ˆ์ œ๋“ค์—์„œ๋Š” ์‹ค์ˆ˜ํ˜• tensor๋งŒ์„ ์˜ˆ๋กœ ๋“ค์—ˆ์ง€๋งŒ ๋‹ค๋ฅธ ์ž๋ฃŒํ˜•๋“ค์˜ ๊ฒฝ์šฐ๋„ ๋‹ค์Œ๊ณผ ๊ฐ™์ด tensor ์ƒ์„ฑ์ด ๊ฐ€๋Šฅํ•˜๋‹ค. + +๊ฐ type๋“ค์— ๋Œ€ํ•œ tensor ์ƒ์„ฑ ๋ฐฉ๋ฒ•์€ ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +{% highlight python %} +# dtype: torch.float32 or torch.float (32-bit floating point) +torch.FloatTensor() + +# dtype: torch.float64 or torch.double (64-bit floating point) +torch.FloatTensor() + +# dtype: torch.uint8 (8-bit integer (unsigned)) +torch.ByteTensor() + +# dtype: torch.int8 (8-bit integer (signed)) +torch.CharTensor() + +# dtype: torch.int16 (16-bit integer (signed)) +torch.ShortTensor() + +# dtype: torch.int32 (32-bit integer (signed)) +torch.IntTensor() + +# dtype: torch.int64 (64-bit integer (signed)) +torch.LongTensor() + +# dtype: torch.bool +torch.BoolTensor() +{% endhighlight %} + +์œ„์™€ ๊ฐ™์ด ํƒ€์ž…์„ ์ง€์ •ํ•˜์—ฌ tensor๋ฅผ ์ƒ์„ฑํ•ด๋„ ๋˜์ง€๋งŒ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ƒ์„ฑํ•  ์ˆ˜๋„ ์žˆ๋‹ค. + +{% highlight python %} +torch.tensor([[1., -1.], [1., -1.]]) + +''' output +tensor([[ 1.0000, -1.0000], + [ 1.0000, -1.0000]]) +''' +{% endhighlight %} +
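As a small side note of my own (not from the lecture), `torch.tensor` also accepts an explicit `dtype` argument, which can be clearer than choosing one of the constructor classes above:

{% highlight python %}
import torch

a = torch.tensor([1, 2, 3], dtype=torch.float32)
b = torch.tensor([1, 2, 3], dtype=torch.int64)
c = torch.tensor([True, False], dtype=torch.bool)

print(a.dtype, b.dtype, c.dtype)
# torch.float32 torch.int64 torch.bool
{% endhighlight %}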

+ +*** +## 3. Frequently Used Operations in PyTorch +### Broadcasting +์„œ๋กœ shape์ด ๋‹ค๋ฅธ tensor๊ฐ„์˜ ์—ฐ์‚ฐ์„ ํ•  ๋•Œ ์ž๋™์œผ๋กœ ๋‘˜ ์ค‘ ๋” ์ž‘์€ shape์˜ tensor๋ฅผ ๋” shape์ด ํฐ tensor์˜ shape์œผ๋กœ ๋งž์ถฐ์ฃผ์–ด(broadcast) ๊ณ„์‚ฐํ•ด ์ฃผ๋Š” ๊ฒƒ์„ ๋งํ•œ๋‹ค. +

+##### Same shape +{% highlight python %} +m1 = torch.FloatTensor([[3, 3]]) +m2 = torch.FloatTensor([[2, 2]]) +print(m1 + m2) + +''' output +tensor([[5., 5.]]) +''' +{% endhighlight %} + +๊ฐ™์€ shape์„ ๊ฐ€์ง€๋Š” tensor ๊ฐ„์˜ ์—ฐ์‚ฐ์€ ์œ„ ์ฝ”๋“œ์˜ ๊ฒฐ๊ณผ์ฒ˜๋Ÿผ ๊ฐ™์€ ์ž๋ฆฌ์˜ ์›์†Œ๋ผ๋ฆฌ ๊ณ„์‚ฐํ•ด ์ฃผ๋ฉด ๋œ๋‹ค. +

+##### Vector + Scalar +{% highlight python %} +m1 = torch.FloatTensor([[1, 2]]) +m2 = torch.FloatTensor([3]) # 3 -> [[3, 3]] +print(m1 + m2) + +''' output +tensor([[4., 5.]]) +''' +{% endhighlight %} + +์ด๋ฒˆ์—๋Š” vector์™€ scalar์˜ ํ•ฉ์ด๋‹ค. ์ด๋Ÿด ๋•Œ `[3]`์ด `[[3, 3]]`์œผ๋กœ **boradcast** ๋˜์–ด shape์„ ๋™์ผํ•˜๊ฒŒ ๋งŒ๋“  ํ›„ ์—ฐ์‚ฐ์„ ์ง„ํ–‰ํ•˜๊ฒŒ ๋œ๋‹ค. ๊ทธ๋ž˜์„œ `[[1, 2]] + [[3, 3]]`์˜ ๊ฒฐ๊ณผ์ธ `[[4, 5]]`๊ฐ€ ๋‚˜์˜จ๋‹ค. +

+##### (2 x 1) Vector + (1 X 2) Vector +{% highlight python %} +m1 = torch.FloatTensor([[1, 2]]) # [[1, 2]] -> [[1, 2], [1, 2]] +m2 = torch.FloatTensor([[3], [4]]) # [[3], [4]] -> [[3, 3], [4, 4]] +print(m1 + m2) + +''' output +tensor([[4., 5.], + [5., 6.]]) +''' +{% endhighlight %} +์„œ๋กœ shape์ด ๋‹ค๋ฅธ vector๊ฐ„์˜ ์—ฐ์‚ฐ์˜ ๊ฒฝ์šฐ์—๋„ ์ž‘์€ ์ฐจ์›์„ ํฐ ์ฐจ์›์œผ๋กœ ๋งž์ถ˜ ํ›„ ์—ฐ์‚ฐ์„ ํ•œ๋‹ค. + +์œ„ ์ฝ”๋“œ์—์„œ๋Š” `[[1, 2]]`์ด `[[1, 2], [1, 2]]`๋กœ `[[3], [4]]`๊ฐ€ `[[3, 3], [4, 4]]`๋กœ ๊ฐ๊ฐ **broadcast**๋˜์–ด ์—ฐ์‚ฐ์„ ์ง„ํ–‰ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๊ทธ ๊ฒฐ๊ณผ๊ฐ’์€ `[[4, 5], [5, 6]]`์ด ๋‚˜์˜จ๋‹ค. +
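One caveat worth adding here (my own note, not from the lecture): because broadcasting happens silently, a shape mistake may not raise an error and can instead produce an unexpectedly large result. A minimal sketch:

{% highlight python %}
import torch

v = torch.FloatTensor([1, 2, 3])        # shape (3,)
w = torch.FloatTensor([[1], [2], [3]])  # shape (3, 1)

# an element-wise sum of three values may have been intended,
# but broadcasting silently produces a (3, 3) matrix instead
print((v + w).shape)  # torch.Size([3, 3])
{% endhighlight %}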

+ +### Mul vs. Matmul +๋‘˜๋‹ค tensor์˜ ๊ณฑ์—ฐ์‚ฐ์„ ํ•ด ์ฃผ๋ฉฐ, ์‚ฌ์šฉํ•˜๋Š” ํ˜•์‹๋„ `m1.matmul(m2)`, `m1.mul(m2)`๋กœ ๋™์ผํ•˜์ง€๋งŒ ์ฐจ์ด์ ์€ ์•ž์„œ ์–ธ๊ธ‰ํ•œ broadcasting ์—ฌ๋ถ€์— ์žˆ๋‹ค. +{% highlight python %} +# Without broadcasting +m1 = torch.FloatTensor([[1, 2], [3, 4]]) +m2 = torch.FloatTensor([[1], [2]]) +print('Shape of Matrix 1: ', m1.shape) # 2 x 2 +print('Shape of Matrix 2: ', m2.shape) # 2 x 1 +print(m1.matmul(m2)) # 2 x 1 + +# With broadcasting +m1 = torch.FloatTensor([[1, 2], [3, 4]]) +m2 = torch.FloatTensor([[1], [2]]) +print('Shape of Matrix 1: ', m1.shape) # 2 x 2 +print('Shape of Matrix 2: ', m2.shape) # 2 x 1 ([[1], [2]]) -> 2 x 2 ([[1, 1], [2, 2]]) +print(m1 * m2) # 2 x 2 +print(m1.mul(m2)) + +''' output +Shape of Matrix 1: torch.Size([2, 2]) +Shape of Matrix 2: torch.Size([2, 1]) +tensor([[ 5.], + [11.]]) +Shape of Matrix 1: torch.Size([2, 2]) +Shape of Matrix 2: torch.Size([2, 1]) +tensor([[1., 2.], + [6., 8.]]) +tensor([[1., 2.], + [6., 8.]]) +''' +{% endhighlight %} + +`matmul`์˜ ๊ฒฝ์šฐ broadcasting ์—†์ด ํ–‰๋ ฌ ๊ณฑ์…ˆ์„ ํ•œ๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ๊ฐ tonsor์˜ shape ๋ณ€ํ™” ์—†์ด ๊ทธ๋Œ€๋กœ ๊ณฑํ•œ๋‹ค. + +๋ฐ˜๋ฉด `mul`์€ broadcasting์„ ํ•˜๊ณ  ํ–‰๋ ฌ์˜ ๊ฐ ์›์†Œ๋“ค์„ ๊ณฑํ•œ๋‹ค. ์ด ๋•Œ๋ฌธ์— `m2`์˜ shape์ด 2 x 1์—์„œ 2 x 2๋กœ `m1`์˜ shape์— ๋งž์ถฐ์ง„ ํ›„์— ๊ฐ ์ž๋ฆฌ์˜ ์›์†Œ๋ผ๋ฆฌ ๊ณฑ์…ˆ์ด ๊ณ„์‚ฐ๋œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. +

+ +### Mean +ํ‰๊ท ์„ ๊ณ„์‚ฐํ•ด ์ค€๋‹ค. +{% highlight python %} +t = torch.FloatTensor([[1, 2], [3, 4]]) +print(t) + +''' output +tensor([[1., 2.], + [3., 4.]]) +''' + +print(t.mean()) +print(t.mean(dim=0)) # ์ฒซ ๋ฒˆ์งธ ์ฐจ์› ํ‰๊ท  +print(t.mean(dim=1)) # ๋‘ ๋ฒˆ์งธ ์ฐจ์› ํ‰๊ท  +print(t.mean(dim=-1)) # ๋งˆ์ง€๋ง‰ ์ฐจ์› ํ‰๊ท  + +''' output +tensor(2.5000) +tensor([2., 3.]) +tensor([1.5000, 3.5000]) +tensor([1.5000, 3.5000]) +''' +{% endhighlight %} + +๋งค๊ฐœ๋ณ€์ˆ˜๋กœ `dim`์„ ์ž…๋ ฅํ•  ์ˆ˜ ์žˆ๋‹ค. ์•„๋ฌด๊ฒƒ๋„ ์ž…๋ ฅํ•˜์ง€ ์•Š์€ ๊ฒฝ์šฐ์—๋Š” tonsor์— ํฌํ•จ๋œ ๊ฐ’ ์ „์ฒด๋ฅผ, ์ฐจ์›์„ ์ง€์ •ํ•ด ์ค€ ๊ฒฝ์šฐ ๊ทธ ์ฐจ์›์˜ ๊ฐ’๋“ค๋กœ ํ‰๊ท ์„ ๊ณ„์‚ฐํ•˜์—ฌ tensor๋ฅผ ๋ฐ˜ํ™˜ํ•œ๋‹ค. + +`dim=0`์ด๋ฉด ์ฒซ ๋ฒˆ์งธ ์ฐจ์›(์„ธ๋กœ)์„ ๊ธฐ์ค€์œผ๋กœ ํ‰๊ท ์„ ๊ณ„์‚ฐํ•˜๊ณ , `dim=1`์ด๋ฉด ๋‘ ๋ฒˆ์งธ ์ฐจ์›(๊ฐ€๋กœ)์„, `dim=-1`์ด๋ฉด ๋งˆ์ง€๋ง‰ ์ฐจ์›(์—ฌ๊ธฐ์„  ๋‘ ๋ฒˆ์งธ ์ฐจ์›)์„ ๊ธฐ์ค€์œผ๋กœ ํ‰๊ท ์„ ๊ณ„์‚ฐํ•œ๋‹ค. +

+ +### Sum +ํ•ฉ์„ ๊ณ„์‚ฐํ•ด ์ค€๋‹ค. ํ‰๊ท ๊ณผ ๊ธฐ๋ณธ์ ์œผ๋กœ ์‚ฌ์šฉ๋ฒ•์ด ๊ฐ™๋‹ค. ์ฐจ์ด์ ์€ ๊ฒฐ๊ณผ๊ฐ’์œผ๋กœ ํ•ฉ์„ ๋ฐ˜ํ™˜ํ•œ๋‹ค๋Š” ๊ฒƒ. + +{% highlight python %} +t = torch.FloatTensor([[1, 2], [3, 4]]) +print(t) + +''' output +tensor([[1., 2.], + [3., 4.]]) +''' + +print(t.sum()) +print(t.sum(dim=0)) # ์ฒซ ๋ฒˆ์งธ ์ฐจ์› ํ•ฉ +print(t.sum(dim=1)) # ๋‘ ๋ฒˆ์งธ ์ฐจ์› ํ•ฉ +print(t.sum(dim=-1)) # ๋งˆ์ง€๋ง‰ ์ฐจ์› ํ•ฉ + +''' output +tensor(10.) +tensor([4., 6.]) +tensor([3., 7.]) +tensor([3., 7.]) +''' +{% endhighlight %} +

+ +### Max and Argmax +max๋Š” ์ตœ๋Œ€๊ฐ’์„ argmax๋Š” ์ตœ๋Œ€๊ฐ’์˜ index๋ฅผ ์˜๋ฏธํ•œ๋‹ค. + +{% highlight python %} +t = torch.FloatTensor([[1, 2], [3, 4]]) +print(t) + +''' output +tensor([[1., 2.], + [3., 4.]]) +''' + +print(t.max()) # Returns one value: max + +''' output +tensor(4.) +''' +{% endhighlight %} + +๋งค๊ฐœ๋ณ€์ˆ˜๋ฅผ ๋„ฃ์ง€ ์•Š์œผ๋ฉด ์ „์ฒด์—์„œ ์ตœ๋Œ€๊ฐ’์„ ์ฐพ์•„ ๊ทธ ๊ฐ’์„ ๋ฐ˜ํ™˜ํ•œ๋‹ค. +
+ +{% highlight python %} +print(t.max(dim=0)) # Returns two values: max and argmax (value amd index) +print('Max: ', t.max(dim=0)[0]) +print('Argmax: ', t.max(dim=0)[1]) + +''' output +(tensor([3., 4.]), tensor([1, 1])) +Max: tensor([3., 4.]) +Argmax: tensor([1, 1]) +''' + +print(t.max(dim=1)) +print(t.max(dim=-1)) + +''' output +(tensor([2., 4.]), tensor([1, 1])) +(tensor([2., 4.]), tensor([1, 1])) +''' +{% endhighlight %} +`dim`์„ ์ง€์ •ํ•˜๋ฉด ํ•ด๋‹น ์ฐจ์›์—์„œ์˜ ์ตœ๋Œ€๊ฐ’๊ณผ ๊ทธ ์ฐจ์›์—์„œ ์ตœ๋Œ€๊ฐ’์˜ ์œ„์น˜๋ฅผ tuple ํ˜•ํƒœ๋กœ ๋ฐ˜ํ™˜ํ•œ๋‹ค. + +์œ„์˜ ๊ฒฝ์šฐ `dim=0`(์ฒซ ๋ฒˆ์งธ ์ฐจ์› - ์—ด)์„ ๊ธฐ์ค€์œผ๋กœ ์ตœ๋Œ€๊ฐ’์ธ 3, 4์™€ ๊ทธ ๊ฐ’๋“ค์˜ index์ธ 1, 1์ด ๋ฐ˜ํ™˜๋˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +`dim=1`์ธ ๊ฒฝ์šฐ์—๋„ ๊ธฐ์ค€์ด ๋˜๋Š” ์ฐจ์›๋งŒ ๋‹ฌ๋ผ์ง€๊ณ  ๊ฐ™์€ ๋ฐฉ์‹์œผ๋กœ `(max, argmax)`๋ฅผ ๋ฐ˜ํ™˜ํ•œ๋‹ค. + +๋งŒ์•ฝ argmax ๊ฐ’๋งŒ ํ•„์š”ํ•˜๋‹ค๋ฉด ์•„๋ž˜์™€ ๊ฐ™์ด `torch.argmax()`๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๊ฐ’์„ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค. +{% highlight python %} +print(t.argmax(dim=0)) +print(t.argmax(dim=1)) +print(t.argmax(dim=-1)) + +''' output +tensor([1, 1]) +tensor([1, 1]) +tensor([1, 1]) +''' +{% endhighlight %} +

+ +### View +numpy์˜ reshape๊ณผ ๊ฐ™์€ ์—ญํ• ์„ ํ•œ๋‹ค. +{% highlight python %} +t = np.array([[[0, 1, 2], + [3, 4, 5]], + + [[6, 7, 8], + [9, 10, 11]]]) +ft = torch.FloatTensor(t) +print(ft.shape) + +''' output +torch.Size([2, 2, 3]) +''' + +print(ft.view([-1, 3])) +print(ft.view([-1, 3]).shape) + +''' output +tensor([[ 0., 1., 2.], + [ 3., 4., 5.], + [ 6., 7., 8.], + [ 9., 10., 11.]]) +''' + +# -1์€ ๋ณดํ†ต ๊ฐ€์žฅ ๋ณ€๋™์ด ์‹ฌํ•œ batch size ๋“ฑ(๊ณ„์‚ฐ ์‹ค์ˆ˜๊ฐ€ ๋งŽ์ด ์ผ์–ด๋‚  ๋งŒํ•œ ๊ณณ)์— ์‚ฌ์šฉ +# view(reshape) ํ•˜๋ ค๋Š” ๊ฒฐ๊ณผ ์ฐจ์›์˜ ๊ณฑ์ด ์ฒ˜์Œ ์ฐจ์›๋“ค์˜ ๊ณฑ๊ณผ ๊ฐ™์•„์•ผ ์‚ฌ์šฉ ๊ฐ€๋Šฅ +print(ft.view([-1, 1, 3])) +print(ft.view([-1, 1, 3]).shape) + +''' output +tensor([[[ 0., 1., 2.]], + + [[ 3., 4., 5.]], + + [[ 6., 7., 8.]], + + [[ 9., 10., 11.]]]) +''' +{% endhighlight %} +shape์ด `[2, 2, 3]`์ธ tensor๋ฅผ ๋‹ค๋ฅธ shape๋“ค๋กœ ๋ณ€๊ฒฝํ•˜๋Š” ์˜ˆ์ œ์ด๋‹ค. + +์ด๋•Œ ์ฃผ์˜ํ•  ์ ์€ shape์„ ๋ณ€๊ฒฝํ•  ๋Œ€์ƒ๊ณผ ๋ณ€๊ฒฝํ•œ ํ›„์˜ shape์˜ ๊ฐ ์ฐจ์›์˜ ๊ณฑ์ด ๊ฐ™์•„์•ผ ํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. ์œ„์™€ ๊ฐ™์ด shape์ด `[2, 2, 3]`์ด๋ผ๋ฉด ๋ณ€๊ฒฝ๋œ ์ดํ›„์˜ ์ฐจ์›๋“ค์˜ ๊ณฑ๋„ 2x2x3 = 12์—ฌ์•ผ ํ•œ๋‹ค. + +๋งŽ์€ ๊ฒฝ์šฐ ์œ„์™€ ๊ฐ™์ด ํ•œ ์ฐจ์›์— -1์„ ๋„ฃ์–ด ์ฐจ์›์„ ์ž๋™์œผ๋กœ ๋งž์ถฐ์ฃผ๋„๋ก ํ•˜๋Š”๋ฐ, ๋ณดํ†ต ๋ณ€๋™์ด ๊ฐ€์žฅ ์‹ฌํ•œ batch size ๋“ฑ์— -1์„ ์ ์šฉํ•˜์—ฌ ์‚ฌ์šฉํ•œ๋‹ค. + +์ด๋ ‡๊ฒŒ ์‚ฌ์šฉํ•˜๋Š” ๊นŒ๋‹ญ์€ ๋ณ€๋™์ด ์‹ฌํ•˜๊ฑฐ๋‚˜ ๊ฐ’์ด ํฐ ๊ฒฝ์šฐ ๋งค๋ฒˆ ๊ณ„์‚ฐํ•˜๋Š” ๊ฒƒ์ด ๋ถˆํŽธํ•˜๊ธฐ๋„ ํ•˜๊ณ  ๊ณ„์‚ฐํ•˜๋Š” ๊ณผ์ •์—์„œ ์‹ค์ˆ˜๊ฐ€ ๋ฐœ์ƒํ•  ์ˆ˜๋„ ์žˆ๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. +
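A related practical note of my own (not from the lecture): `view` requires the tensor's memory layout to be contiguous, so after operations such as a transpose you may need `contiguous()` first, or the more forgiving `reshape`:

{% highlight python %}
import torch

t = torch.arange(6).view(2, 3)
tt = t.t()                        # transpose -> no longer contiguous

print(tt.is_contiguous())         # False
# tt.view(6)                      # would raise a RuntimeError
print(tt.contiguous().view(6))    # tensor([0, 3, 1, 4, 2, 5])
print(tt.reshape(6))              # reshape handles this case directly
{% endhighlight %}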

+ +### Squeeze +๊ฑธ๋ ˆ๋ฅผ ์งœ์„œ ๋ฌผ์„ ๋นผ๋‚ด๋Š” ๊ฒƒ๊ณผ ๊ฐ™์ด ์ฐจ์›์ด 1์ธ ๊ฒƒ์„ ๋ชจ๋‘ ์‚ญ์ œํ•ด ์ค€๋‹ค. +{% highlight python %} +ft = torch.FloatTensor([[0], [1], [2]]) +print(ft) +print(ft.shape) + +''' output +tensor([[0.], + [1.], + [2.]]) +torch.Size([3, 1]) +''' + +# 1์ด ์žˆ๋Š” ์ฐจ์›์„ ์‚ญ์ œ +# dim=? ์— ๊ฐ’์„ ๋„ฃ์„ ๊ฒฝ์šฐ ํ•ด๋‹น ์ฐจ์›์ด 1์ด๋ฉด ์‚ญ์ œ +print(ft.squeeze()) +print(ft.squeeze().shape) + +''' output +tensor([0., 1., 2.]) +torch.Size([3]) +''' +{% endhighlight %} +์œ„์™€ ๊ฐ™์ด ์ฐจ์›์ด 1์•„๋ฉด ์‚ญ์ œํ•ด ์ค€๋‹ค. `dim=?`๋„ ์„ค์ •ํ•ด ์ค„ ์ˆ˜ ์žˆ๋Š”๋ฐ, ์ด ๊ฒฝ์šฐ ํ•ด๋‹น ์ฐจ์›์ด 1์ด๋ฉด ์‚ญ์ œํ•œ๋‹ค. ์˜ˆ์ œ์™€ ๊ฐ™์€ tensor์˜ ๊ฒฝ์šฐ `dim=1`์ผ ๋•Œ ๋™์ผํ•œ ๊ฒฐ๊ณผ๋ฅผ ๋ณด์—ฌ์ค€๋‹ค. +
+### Unsqueeze +`squeeze`์˜ ๋ฐ˜๋Œ€์ด๋‹ค. ์ฐจ์›์„ ๋ช…์‹œํ•˜์—ฌ ๊ทธ ์ฐจ์›์„ 1๋กœ ๋งŒ๋“ค์–ด ์ค€๋‹ค. +{% highlight python %} +ft = torch.Tensor([0, 1, 2]) +print(ft.shape) + +''' output +torch.Size([3]) +''' +# ์ฐจ์›(dim)์„ ๋ช…์‹œํ•˜์—ฌ ๊ทธ ์ฐจ์›์„ 1๋กœ ๋งŒ๋“ค์–ด์คŒ +print(ft.unsqueeze(0)) +print(ft.unsqueeze(0).shape) + +print(ft.unsqueeze(1)) +print(ft.unsqueeze(1).shape) + +''' output +tensor([[0., 1., 2.]]) +torch.Size([1, 3]) + +tensor([[0.], + [1.], + [2.]]) +torch.Size([3, 1]) +''' +{% endhighlight %} +์ง€์ •ํ•œ ์ฐจ์›์„ 1๋กœ ๋งŒ๋“ค์–ด ์ฃผ๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +{% highlight python %} +print(ft.view(1, -1)) +print(ft.view(1, -1).shape) + +print(ft.view(-1, 1)) +print(ft.view(-1, 1).shape) + +''' output +tensor([[0., 1., 2.]]) +torch.Size([1, 3]) + +tensor([[0.], + [1.], + [2.]]) +torch.Size([3, 1]) +''' +{% endhighlight %} +view๋ฅผ ์ด์šฉํ•ด์„œ ๊ฐ™์€ ๊ฒฐ๊ณผ๋ฅผ ๋งŒ๋“ค ์ˆ˜๋„ ์žˆ๋‹ค. +

+ +### Casting +tensor์˜ ํ˜•์„ ๋ณ€ํ™˜์‹œ์ผœ์ฃผ๋Š” ๊ฒƒ์„ ๋งํ•œ๋‹ค. +{% highlight python %} +# ํ…์„œ์˜ ํ˜•๋ณ€ํ™˜ +t = torch.LongTensor([1, 2, 3, 4]) +print(lt) + +''' output +tensor([1, 2, 3, 4]) +''' + +print(lt.float()) + +''' output +tensor([1., 2., 3., 4.]) +''' +{% endhighlight %} + +์œ„์™€ ๊ฐ™์ด `tensor.float()`์œผ๋กœ ์‹ค์ˆ˜ํ˜• ๋ณ€ํ™˜์„ ํ•  ์ˆ˜ ์žˆ๊ณ  + +{% highlight python %} +bt = torch.ByteTensor([True, False, False, True]) +print(bt) + +''' output +tensor([1, 0, 0, 1], dtype=torch.uint8) +''' + +print(bt.long()) +print(bt.float()) + +''' output +tensor([1, 0, 0, 1]) +tensor([1., 0., 0., 1.]) +''' +{% endhighlight %} + +์ด์ฒ˜๋Ÿผ torch์˜ ๋‹ค๋ฅธ ํƒ€์ž…๋“ค๋กœ๋„ ๋ณ€ํ™˜์ด ๊ฐ€๋Šฅํ•˜๋‹ค. +
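The same conversions can also be written with `.to()` or `.type()` (a small sketch of my own, not from the lecture); `.to()` is handy because it also moves tensors between devices:

{% highlight python %}
import torch

lt = torch.LongTensor([1, 2, 3, 4])

print(lt.to(torch.float32))    # tensor([1., 2., 3., 4.])
print(lt.type(torch.float32))  # same result as above
print(lt.bool())               # tensor([True, True, True, True])
{% endhighlight %}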

+ +### Concatenation +tensor๋“ค์„ ์ด์–ด๋ถ™์ธ๋‹ค. + +{% highlight python %} +x = torch.FloatTensor([[1, 2], [3, 4]]) +y = torch.FloatTensor([[5, 6], [7, 8]]) + +# ์ฃผ์–ด์ง„ ์ฐจ์›์ด ๋Š˜์–ด๋‚จ (์ด์–ด ๋ถ™์ž„) +print(torch.cat([x, y], dim=0)) +print(torch.cat([x, y], dim=1)) + +''' output +tensor([[1., 2.], + [3., 4.], + [5., 6.], + [7., 8.]]) +tensor([[1., 2., 5., 6.], + [3., 4., 7., 8.]]) +''' +{% endhighlight %} + +์ด ๋•Œ ์ฐจ์›์„ ์ง€์ •ํ•ด ์ฃผ๋ฉด ๊ทธ ์ฐจ์›์œผ๋กœ tensor๋ฅผ ์ž‡๋Š”๋‹ค. default๋Š” `dim=0` +

+ +### Stacking +`cat`๊ณผ ๋น„์Šทํ•˜์ง€๋งŒ ๋ฐ์ดํ„ฐ์˜ ์ฐจ์›์€ ์œ ์ง€ํ•˜๋ฉด์„œ ์Œ“๋Š”๋‹ค๋Š” ๊ฐœ๋…์ด `cat`์˜ ์ด์–ด๋ธฅ์ธ๋‹ค๋Š” ๊ฐœ๋…๊ณผ๋Š” ์กฐ๊ธˆ ๋‹ค๋ฅด๋‹ค. + +{% highlight python %} +x = torch.FloatTensor([1, 4]) +y = torch.FloatTensor([2, 5]) +z = torch.FloatTensor([3, 6]) + + +print(torch.stack([x, y, z])) +print(torch.stack([x, y, z], dim=1)) + +''' output +tensor([[1., 4.], + [2., 5.], + [3., 6.]]) +tensor([[1., 2., 3.], + [4., 5., 6.]]) +''' +{% endhighlight %} + +๋งˆ์ฐฌ๊ฐ€์ง€๋กœ stackํ•˜๋Š” ๋ฐฉํ–ฅ์„ `dim`์œผ๋กœ ์กฐ์ ˆํ•ด ์ค„ ์ˆ˜ ์žˆ๋‹ค. + +๋˜ํ•œ, ์ด ๊ณผ์ •์€ `unsqueeze`์™€ `cat`์˜ ์กฐํ•ฉ์œผ๋กœ ๋˜‘๊ฐ™์ด ์ง„ํ–‰ํ•  ์ˆ˜ ์žˆ๋‹ค. + +{% highlight python %} +# torch.cat([(1, 2), (1, 2), (1, 2)], dim=0) -> (3, 2) (stacking๊ณผ ๋™์ผ) +print(torch.cat([x.unsqueeze(0), y.unsqueeze(0), z.unsqueeze(0)], dim=0)) + +''' output +tensor([[1., 4.], + [2., 5.], + [3., 6.]]) +''' +{% endhighlight %} + +#### cat vs. stack +cat ๊ณผ stack์ด ์กฐ๊ธˆ ํ—ท๊ฐˆ๋ ค์„œ ์ฐจ์ด์ ์„ ์ •๋ฆฌํ•ด๋ณผ๊นŒ ํ•œ๋‹ค. + +๊ฒฐ๊ณผ๊ฐ’๋“ค์˜ shape์„ ๋น„๊ตํ•ด ๋ณด๋ฉด ์กฐ๊ธˆ ๋” ์ฐจ์ด์ ์„ ์•Œ๊ธฐ ์‰ฝ๋‹ค. + +{% highlight python %} +x = torch.FloatTensor([[1, 2], [3, 4]]) +y = torch.FloatTensor([[5, 6], [7, 8]]) + +print(x.shape, y.shape) +print(torch.cat([x, y], dim=0).shape) +print(torch.cat([x, y], dim=1).shape) + +x = torch.FloatTensor([1, 4]) +y = torch.FloatTensor([2, 5]) +z = torch.FloatTensor([3, 6]) + +print(x.shape, y.shape, z.shape) +print(torch.stack([x, y, z]).shape) +print(torch.stack([x, y, z], dim=1).shape) + +''' output +torch.Size([2, 2]) torch.Size([2, 2]) +torch.Size([4, 2]) +torch.Size([2, 4] + +torch.Size([2]) torch.Size([2]), torch.Size([2] +torch.Size([3, 2]) +torch.Size([2, 3]) +''' +{% endhighlight %} + +![](/posting_imgs/lab1-4.jpg) + +์ฝ”๋“œ์˜ ๊ฒฐ๊ณผ์™€ ๊ทธ๋ฆผ์„ ํ•จ๊ป˜ ๋ณด์ž. + +concat์˜ ๊ฒฝ์šฐ ๋ง ๊ทธ๋Œ€๋กœ ์ด์–ด ๋ถ™์ด๋Š” ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์— ์ง€์ •ํ•ด ์ค€ ์ฐจ์›์˜ ๊ธธ์ด๊ฐ€ ๊ฐ tensor์˜ ์ฐจ์› ๊ธธ์ด ๋งŒํผ ๋Š˜์–ด๋‚ฌ๋‹ค. ํ•˜์ง€๋งŒ ์ฐจ์›์˜ ๊ฐœ์ˆ˜ ๋ณ€ํ™”๋Š” ์—†๋‹ค. +๋ฐ˜๋ฉด, stack์€ ์ƒˆ๋กœ์šด ์ฐจ์›์˜ ๋ฐฉํ–ฅ์œผ๋กœ ์Œ“๋Š” ๊ณผ์ •์ด๊ธฐ ๋•Œ๋ฌธ์— tensor ๊ฐ„์˜ shape์ด ๊ฐ™์•„์•ผ ํ•˜๊ณ  ์ฐจ์›์ด ํ•˜๋‚˜ ๋” ๋Š˜์–ด๋‚œ๋‹ค. ๊ทธ๋ฆฌ๊ณ  ๊ทธ ์ฐจ์›์˜ ํฌ๊ธฐ๋Š” ์Œ“์œผ๋ ค๋Š” ํ…์„œ์˜ ๊ฐœ์ˆ˜์™€ ๊ฐ™๋‹ค. + +๋‹ค์‹œ ์ •๋ฆฌํ•ด ๋ณด๋ฉด conact์€ ๋ฐ์ดํ„ฐ๋ฅผ ๊ทธ๋Œ€๋กœ ์ž‡๋Š” ๊ฒƒ, stack์€ ์ƒˆ๋กœ์šด ์ฐจ์›์œผ๋กœ ์Œ“๋Š” ๊ฒƒ์ด๋ผ๋Š” ์ฐจ์ด์ ์ด ์žˆ๋‹ค. +

+ +### Ones and Zeros Like +ํ•ด๋‹น tensor์™€ ๊ฐ™์€ shape์˜ 1 ๋˜๋Š” 0์œผ๋กœ ์ฑ„์›Œ์ง„ tensor๋ฅผ ๋งŒ๋“ ๋‹ค. + +{% highlight python %} +x = torch.FloatTensor([[0, 1, 2], [2, 1, 0]]) +print(x) + +''' output +tensor([[0., 1., 2.], + [2., 1., 0.]]) +''' + +# device๋„ ๊ฐ™๊ฒŒ ์„ ์–ธ๋จ +print(torch.ones_like(x)) +print(torch.zeros_like(x)) + +''' output +tensor([[1., 1., 1.], + [1., 1., 1.]]) +tensor([[0., 0., 0.], + [0., 0., 0.]]) +''' +{% endhighlight %} + +์ด๋ ‡๊ฒŒ ์ƒ์„ฑ๋œ tensor๋Š” shape ๋ฟ๋งŒ ์•„๋‹ˆ๋ผ device๋„ ๊ฐ™๊ฒŒ ์ƒ์„ฑ๋œ๋‹ค. ์ฆ‰, ๋ฐ”๋กœ ๊ธฐ์กด์˜ tensor์™€ ์—ฐ์‚ฐ์ด ๊ฐ€๋Šฅํ•˜๋‹ค. +

+ +### In-place Operation +์„ ์–ธ ์—†์ด ๋ฐ”๋กœ ๊ฒฐ๊ณผ๊ฐ’์œผ๋กœ ๋Œ€์ฒดํ•œ๋‹ค. ์‚ฌ์šฉ๋ฒ•์€ ์—ฐ์‚ฐ์ž์— `_`๋ฅผ ๋ถ™์ด๋ฉด ๋œ๋‹ค. + +{% highlight python %} +print(x.mul(2.)) +print(x) +# ์„ ์–ธ ์—†์ด ๋ฐ”๋กœ ๋Œ€์ฒด +print(x.mul_(2.)) +print(x) + +''' output +tensor([[2., 4.], + [6., 8.]]) +tensor([[1., 2.], + [3., 4.]]) +tensor([[2., 4.], + [6., 8.]]) +tensor([[2., 4.], + [6., 8.]]) +''' +{% endhighlight %} + +์„ ์–ธํ•˜๋Š” ๊ณผ์ •์„ ์ƒ๋žตํ•  ์ˆ˜ ์žˆ๋‹ค๋Š” ์žฅ์ ์ด ์žˆ์ง€๋งŒ PyTorch์˜ gc๊ฐ€ ์ž˜ ์„ค๊ณ„๋˜์–ด ์žˆ์–ด์„œ ์†๋„๋ฉด์˜ ์ด์ ์€ ํฌ๊ฒŒ ์—†์„ ์ˆ˜๋„ ์žˆ๋‹ค๊ณ  ํ•œ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-04-17-dlZeroToAll-PyTorch-2.markdown b/_posts/2022-04-17-dlZeroToAll-PyTorch-2.markdown new file mode 100644 index 00000000000..be171b19187 --- /dev/null +++ b/_posts/2022-04-17-dlZeroToAll-PyTorch-2.markdown @@ -0,0 +1,224 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab2: Linear regression" +author: Kwon +date: 2022-04-17T16:00:00+0900 +categories: [pytorch, study] +tags: [linear-regressoin] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab 2: Linear regression ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** +## Theoretical Overview + +### Hypothesis (๊ฐ€์„ค) +์„ ํ˜• ํšŒ๊ท€์—์„œ ์‚ฌ์šฉํ•˜๋Š” 1์ฐจ ๋ฐฉ์ •์‹์„ ๋งํ•œ๋‹ค. weight์™€ bias๋ฅผ ๊ณ„์† ๋ฐ”๊ฟ”๊ฐ€๋ฉด๊ฑฐ ๋งˆ์ง€๋ง‰ ํ•™์Šต์ด ๋๋‚œ ๋’ค์˜ ์ตœ์ข…๊ฐ’์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ฐ์ดํ„ฐ๋ฅผ ์˜ˆ์ธกํ•œ๋‹ค. ์ˆ˜์‹์€ ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +\\[ H(x) = Wx + b \\] + +์ตœ์ข… ๊ฒฐ๊ณผ๋กœ ๋‚˜์˜จ ๊ฐ€์„ค์„ model์ด๋ผ ํ•˜๊ณ  'ํ•™์Šต๋˜์—ˆ๋‹ค'๊ณ  ํ•œ๋‹ค. + +### Cost +model์˜ ์˜ˆ์ธก ๊ฐ’์ด ์‹ค์ œ ๊ฐ’๊ณผ ์–ผ๋งˆ๋‚˜ ๋‹ค๋ฅธ ์ง€๋ฅผ ์•Œ๋ ค์ค€๋‹ค. (์ž‘์„์ˆ˜๋ก ์ข‹์€ ๋ชจ๋ธ์ด๋‹ค.) + +์—ฌ๊ธฐ์„œ๋Š” ์•„๋ž˜์™€ ๊ฐ™์€ MSE(Mean Square Error)๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +\\[ MSE = cost(W, b) = \frac{1}{m} \sum^m_{i=1} \left( H(x^{(i)}) - y^{(i)} \right)^2 \\] + +*** +## Import +{% highlight python %} +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +{% endhighlight %} + +*** +## Data +๋ฐ์ดํ„ฐ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๊ฐ„๋‹จํ•œ ๊ณต๋ถ€ ์‹œ๊ฐ„ - ์„ฑ์  ๋ฐ์ดํ„ฐ ์…‹์„ ์‚ฌ์šฉํ•œ๋‹ค. + +| ๊ณต๋ถ€ ์‹œ๊ฐ„ | ์„ฑ์  | +|:----:|:----:| +|1|1| +|2|2| +|3|3| +{:.inner-borders} + +{% highlight python %} +x_train = torch.FloatTensor([[1], [2], [3]]) +y_train = torch.FloatTensor([[1], [2], [3]]) +{% endhighlight %} + +๊ณต๋ถ€ ์‹œ๊ฐ„์ด ๊ฐ๊ฐ 1์‹œ๊ฐ„์—์„œ 3์‹œ๊ฐ„์ผ ๋•Œ ๊ทธ ์ ์ˆ˜๋„ 1์‹œ๊ฐ„์—์„œ 3์‹œ๊ฐ„์ธ ๋ฐ์ดํ„ฐ ์…‹์ด๋‹ค. + +์ด๋Ÿฐ ๋ฐ์ดํ„ฐ์…‹์ผ ๋•Œ ๊ฐ€์žฅ ์ด์ƒ์ ์ธ ํšŒ๊ท€์„ ์„ ํ•œ๋ฒˆ ์ƒ๊ฐํ•ด ๋ณด์ž. + +![](/posting_imgs/lab2-1.png) + +์œ„์™€ ๊ฐ™์€ $ y = x $ ๊ผด์˜ ์ง์„ ์ด ๊ฐ€์žฅ ์ด์ƒ์ ์ผ ๊ฒƒ์ด๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ์ดํ›„ ํ•™์Šต์„ ์ง„ํ–‰ํ•  ๋•Œ weight=1, bias=0์— ๊ฐ€๊นŒ์›Œ ์ง€๋Š”์ง€ ํ™•์ธํ•˜๋ฉด ํ•™์Šต์ด ์ž˜ ์ด๋ฃจ์–ด์ง€๊ณ  ์žˆ๋Š”์ง€ ์•Œ ์ˆ˜ ์žˆ์„ ๊ฒƒ์ด๋‹ค. + +*** +## Weight Initialization + +{% highlight python %} +W = torch.zeros(1, requires_grad=True) +print(W) +b = torch.zeros(1, requires_grad=True) +print(b) + +''' output +tensor([0.], requires_grad=True) +tensor([0.], requires_grad=True) +''' +{% endhighlight %} + +ํ•™์Šตํ•  weight์™€ bias๋ฅผ ๋ชจ๋‘ 0์œผ๋กœ ์ดˆ๊ธฐํ™” ํ•ด ์ค€๋‹ค. + +*** +## Hypothesis +\\[ H(x) = Wx + b \\] + +์•ž์„œ ๋ณด์•˜๋˜ Hypothesis ์‹์— ๋”ฐ๋ผ ์ดˆ๊ธฐํ™”ํ•ด ์ค€๋‹ค. 
+ +{% highlight python %} +hypothesis = x_train * W + b +print(hypothesis) + +''' output +tensor([[0.], + [0.], + [0.]], grad_fn=) +''' +{% endhighlight %} + +์ดˆ๊ธฐ weight์™€ bias๋Š” ๋ชจ๋‘ 0์ด๋ฏ€๋กœ ๋ชจ๋“  ๊ฐ’์ด 0์œผ๋กœ ๋‚˜์˜จ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +*** +## Cost +\\[ MSE = cost(W, b) = \frac{1}{m} \sum^m_{i=1} \left( H(x^{(i)}) - y^{(i)} \right)^2 \\] + +Cost๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ์•ž์„œ ๋‚˜์˜จ ์‹์— ๋งž์ถฐ ์ •์˜ํ•ด ์ค€๋‹ค. + +{% highlight python %} +cost = torch.mean((hypothesis - y_train) ** 2) +print(cost) + +''' output +tensor(4.6667, grad_fn=) +''' +{% endhighlight %} + +*** +## Gradient Descent +`optim`์„ ํ†ตํ•ด SGD optimer๋ฅผ ๋ถˆ๋Ÿฌ ์‚ฌ์šฉํ•œ๋‹ค. Gradient Descent์— ๋Œ€ํ•œ ๋‚ด์šฉ์€ ๋‹ค์Œ ํฌ์ŠคํŒ…์—์„œ ๋” ์ž์„ธํ•˜๊ฒŒ ์‚ดํŽด ๋ณผ ์˜ˆ์ •์ด๋‹ค. + +{% highlight python %} +optimizer = optim.SGD([W, b], lr=0.01) +{% endhighlight %} + +๊ทธ๋ฆฌ๊ณ  PyTorch์—์„œ ํ•™์Šต์„ ํ•  ๋•Œ ํ•ญ์ƒ ๋ฌถ์–ด ์“ฐ๋Š” ์ฝ”๋“œ๋“ค์ด ์žˆ๋‹ค. + +{% highlight python %} +optimizer.zero_grad() # gradient ์ดˆ๊ธฐํ™” +cost.backward() # gradient ๊ณ„์‚ฐ +optimizer.step() # ๊ณ„์‚ฐ๋œ gradient๋ฅผ ๋”ฐ๋ผ W, b๋ฅผ ๊ฐœ์„  + +print(W) +print(b) + +''' output +tensor([0.0933], requires_grad=True) +tensor([0.0400], requires_grad=True) +''' +{% endhighlight %} + +์œ„ 3๊ฐœ์˜ ์ฝ”๋“œ๋Š” gradient๋ฅผ ์ดˆ๊ธฐํ™” ํ•˜๊ณ , cost์— ๋”ฐ๋ผ ๊ณ„์‚ฐํ•˜๊ณ , ๊ทธ ๊ฒฐ๊ณผ์— ๋”ฐ๋ผ weight์™€ bias๋ฅผ ๊ฐœ์„ ํ•˜๋Š” ๊ณผ์ •์ด๋‹ค. ํ•™์Šตํ•  ๋•Œ ํ•„์š”ํ•œ ๋ถ€๋ถ„์ด๋ฏ€๋กœ ์™ธ์›Œ๋‘์–ด์•ผ ํ•œ๋‹ค. + +gradient๋ฅผ ๊ณ„์‚ฐํ•  ๋•Œ ๊ฐ ์‹œํ–‰๋งˆ๋‹ค `optimizer.zero_grad()`๋กœ gradient๋ฅผ ์ดˆ๊ธฐํ™”ํ•˜๋Š” ์ด์œ ๋Š” `cost.backward()`๊ฐ€ ๊ณ„์‚ฐ์„ ํ•˜๊ณ  ๊ธฐ์กด gradient๋ฅผ ๋Œ€์ฒดํ•˜๋Š” ๊ฒƒ์ด ์•„๋‹ˆ๋ผ ๋”ํ•˜๊ธฐ ๋•Œ๋ฌธ์ด๋ผ๊ณ  ํ•œ๋‹ค.(DNN์—์„œ์˜ backpropagation์„ ํ•  ๋•Œ ์œ ์šฉํ•˜๊ธฐ ๋•Œ๋ฌธ) +๊ทธ๋ž˜์„œ `optimizer.zero_grad()`๋ฅผ ํ†ตํ•ด ์ดˆ๊ธฐํ™”ํ•ด ์ฃผ์ง€ ์•Š์œผ๋ฉด ์ „ ์‹œํ–‰์˜ gradient์™€ ํ˜„์žฌ์˜ ๊ฒƒ์ด ๋ˆ„์ ๋˜์–ด ์ž˜๋ชป๋œ ๋ฐฉํ–ฅ์œผ๋กœ ํ•™์Šต๋˜๊ฒŒ ๋œ๋‹ค. + +์œ„ ์ฝ”๋“œ๋ฅผ ํ•œ ๋ฒˆ ์‹คํ–‰ํ•œ ๊ฒƒ์ด ํ•œ ๋ฒˆ ํ•™์Šตํ•œ ๊ฒƒ๊ณผ ๊ฐ™๋‹ค. ๊ทธ ๊ฒฐ๊ณผ์— ๋”ฐ๋ผ weight์™€ bias์˜ ๊ฐœ์„ ์ด ์ด๋ฃจ์–ด์ง„ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +๊ฐœ์„ ๋œ weight์™€ bias๋กœ ๋‹ค์‹œ ์˜ˆ์ธก์„ ํ•ด ๋ณด๋ฉด, ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ฐ’์„ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค. + +{% highlight python %} +hypothesis = x_train * W + b +print(hypothesis) + +cost = torch.mean((hypothesis - y_train) ** 2) +print(cost) + +''' output +tensor([[0.1333], + [0.2267], + [0.3200]], grad_fn=) + +tensor(3.6927, grad_fn=) +''' +{% endhighlight %} + +๊ธฐ์กด์— 0์ด์—ˆ๋˜ ๊ฐ’๋“ค์ด ์‹ค์ œ ๊ฐ’์— ๊ฐ€๊น๊ฒŒ ๋ณ€ํ–ˆ๊ณ , cost๋„ ์ค„์–ด๋“  ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. 
+ +*** +## Training with Full Code +{% highlight python %} +# ๋ฐ์ดํ„ฐ +x_train = torch.FloatTensor([[1], [2], [3]]) +y_train = torch.FloatTensor([[1], [2], [3]]) +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +W = torch.zeros(1, requires_grad=True) +b = torch.zeros(1, requires_grad=True) +# optimizer ์„ค์ • +optimizer = optim.SGD([W, b], lr=0.01) + +nb_epochs = 1000 +for epoch in range(nb_epochs + 1): + + # H(x) ๊ณ„์‚ฐ + hypothesis = x_train * W + b + + # cost ๊ณ„์‚ฐ + cost = torch.mean((hypothesis - y_train) ** 2) + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # 100๋ฒˆ๋งˆ๋‹ค ๋กœ๊ทธ ์ถœ๋ ฅ + if epoch % 100 == 0: + print('Epoch {:4d}/{} W: {:.3f}, b: {:.3f} Cost: {:.6f}'.format( + epoch, nb_epochs, W.item(), b.item(), cost.item() + )) + +''' output +Epoch 0/1000 W: 0.093, b: 0.040 Cost: 4.666667 +Epoch 100/1000 W: 0.873, b: 0.289 Cost: 0.012043 +Epoch 200/1000 W: 0.900, b: 0.227 Cost: 0.007442 +Epoch 300/1000 W: 0.921, b: 0.179 Cost: 0.004598 +Epoch 400/1000 W: 0.938, b: 0.140 Cost: 0.002842 +Epoch 500/1000 W: 0.951, b: 0.110 Cost: 0.001756 +Epoch 600/1000 W: 0.962, b: 0.087 Cost: 0.001085 +Epoch 700/1000 W: 0.970, b: 0.068 Cost: 0.000670 +Epoch 800/1000 W: 0.976, b: 0.054 Cost: 0.000414 +Epoch 900/1000 W: 0.981, b: 0.042 Cost: 0.000256 +Epoch 1000/1000 W: 0.985, b: 0.033 Cost: 0.000158 +''' +{% endhighlight %} + +1000 Epoch ํ•™์Šตํ•˜๋Š” ์ „์ฒด ์ฝ”๋“œ์ด๋‹ค. ๋กœ๊ทธ์—์„œ ์•Œ ์ˆ˜ ์žˆ๋“ฏ์ด cost๋Š” ์ ์  ์ค„์–ด๋“ค๊ณ  weight์™€ bias๋„ ์šฐ๋ฆฌ๊ฐ€ ์˜ˆ์ƒํ–ˆ๋˜ ํšŒ๊ท€์„ ์˜ ๊ฒƒ๊ณผ ๋น„์Šทํ•ด์กŒ๋‹ค. + +{% highlight python %} +x_train * W + b + +''' output +tensor([[1.0186], + [2.0040], + [2.9894]], grad_fn=) +''' +{% endhighlight %} + +์‹ค์ œ๋กœ ํ•™์Šต๋œ ๋ชจ๋ธ๋กœ ์˜ˆ์ธก์„ ํ•ด ๋ณด๋ฉด ์‹ค์ œ ๊ฐ’๊ณผ ์•„์ฃผ ๋น„์Šทํ•˜๊ฒŒ ๋‚˜์˜จ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-04-18-dlZeroToAll-PyTorch-3.markdown b/_posts/2022-04-18-dlZeroToAll-PyTorch-3.markdown new file mode 100644 index 00000000000..7db2293b28f --- /dev/null +++ b/_posts/2022-04-18-dlZeroToAll-PyTorch-3.markdown @@ -0,0 +1,217 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab3: Minimizing Cost" +author: Kwon +date: 2022-04-18T14:00:00+0900 +categories: [pytorch, study] +tags: [linear-regressoin, cost, gradient-descent] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab 3: Minimizing Cost ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** +## Theoretical Overview +์ด๋ฒˆ์—๋Š” Grdient descent์— ๋Œ€ํ•ด ์กฐ๊ธˆ ๋” ์ง‘์ค‘์ ์œผ๋กœ ์•Œ์•„๋ณด๊ธฐ ์œ„ํ•ด Hypothesis๋ฅผ ์กฐ๊ธˆ ๋” ๋‹จ์ˆœํ•˜๊ฒŒ $ H(x) = Wx $๋กœ ๋ฐ”๊พธ์–ด ์‚ดํŽด๋ณด์ž. + +cost๋Š” ๋˜‘๊ฐ™์ด MSE(Mean Square Error)๋ฅผ ์‚ฌ์šฉํ•˜๊ณ  ๋ฐ์ดํ„ฐ๋„ ์ด์ „๊ณผ ๊ฐ™์€ ๊ณต๋ถ€ ์‹œ๊ฐ„ - ์„ฑ์  ๋ฐ์ดํ„ฐ๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. ([Lab2 ํฌ์ŠคํŒ… ์ฐธ์กฐ](/posts/dlZeroToAll-PyTorch-2/)) + +\\[ MSE = cost(W) = \frac{1}{m} \sum^m_{i=1} \left( H(x^{(i)}) - y^{(i)} \right)^2 \\] + +*** + +## Import +{% highlight python %} +import matplotlib.pyplot as plt +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +{% endhighlight %} + +*** +## Cost by W +W์˜ ๋ณ€ํ™”์— ๋”ฐ๋ฅธ cost ๊ทธ๋ž˜ํ”„๋ฅผ ๊ทธ๋ ค๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์€ 2์ฐจ ๊ณก์„ ์ด ๊ทธ๋ ค์ง„๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ cost๊ฐ€ ๊ฐ€์žฅ ์ž‘์€ ์ ์€ ๊ธฐ์šธ๊ธฐ(๋ฏธ๋ถ„๊ฐ’)๊ฐ€ 0์ธ ๊ทน์†Œ์ ์ด๋‹ค. 
+ +{% highlight python %} +W_l = np.linspace(-5, 7, 1000) +cost_l = [] +for W in W_l: + hypothesis = W * x_train + cost = torch.mean((hypothesis - y_train) ** 2) + + cost_l.append(cost.item()) + +plt.plot(W_l, cost_l) +plt.xlabel('$W$') +plt.ylabel('Cost') +plt.show() +{% endhighlight %} +![](/posting_imgs/lab3-1.png) +
+ +*** +## Gradient Descent by Hand +cost๊ฐ€ ๊ฐ€์žฅ ์ž‘์€ ์ ์„ ์ฐพ๋Š” ๊ฒƒ์ด ์šฐ๋ฆฌ์˜ ๋ชฉํ‘œ์ธ๋ฐ, ์ด๊ฒƒ์„ cost์˜ ๋ฏธ๋ถ„๊ฐ’์„ ์ด์šฉํ•˜๋Š” ๋ฐฉ์‹์œผ๋กœ ๋‹ฌ์„ฑํ•˜๋ ค๊ณ  ํ•œ๋‹ค. + +cost๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์œผ๋ฏ€๋กœ +

+\\[ MSE = cost(W) = \frac{1}{m} \sum^m_{i=1} \left( H(x^{(i)}) - y^{(i)} \right)^2 \\] + +$W$์— ๋Œ€ํ•ด ๋ฏธ๋ถ„ํ•˜๋ฉด ์•„๋ž˜์™€ ๊ฐ™์€ ๊ฒฐ๊ณผ๋ฅผ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค. + +\\[ \nabla W = \frac{\partial cost}{\partial W} = \frac{2}{m} \sum^m_{i=1} \left( Wx^{(i)} - y^{(i)} \right)x^{(i)} \\] + +์ด๋ ‡๊ฒŒ ๊ตฌํ•œ gradient๋Š” ๋‹ค์Œ ์‹๊ณผ ๊ฐ™์ด ํ•™์Šต์— ์ ์šฉํ•˜๊ฒŒ ๋œ๋‹ค. + +\\[ W := W - \alpha \nabla W \,\,\left(\alpha = learning\,\,rate\right)\\] + +์ด๋Ÿฐ ํ˜•ํƒœ๋กœ ํ•™์Šต์„ ํ•˜๋Š” ์ด์œ ๋ฅผ ํ•œ๋ฒˆ ์•Œ์•„๋ณด์ž. +

+์•„๋ž˜์˜ ๋‘ gif๋Š” ๊ฐ๊ฐ ๊ทน์†Œ์ ์˜ ์ขŒ์šฐ์—์„œ ๊ทน์†Œ์ ์— ์ ‘๊ทผํ•  ๋•Œ ์ ‘์„ ์˜ ๋ณ€ํ™”๋ฅผ ๋‚˜ํƒ€๋‚ธ ๊ฒƒ์ด๋‹ค. +

+![](/posting_imgs/lab3-2.gif) +

+๋จผ์ € ์™ผ์ชฝ์—์„œ ์ ‘๊ทผํ•˜๋Š” ๊ฒฝ์šฐ ๊ธฐ์šธ๊ธฐ(gradient)๊ฐ€ ์Œ์ˆ˜์ด๊ณ  ๊ทน์†Œ์ ์œผ๋กœ ๋„๋‹ฌํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” $W$๊ฐ€ ์ปค์ ธ์•ผ ํ•œ๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ์Œ์ˆ˜์ธ ๊ธฐ์šธ๊ธฐ๋ฅผ ๋นผ์ฃผ์–ด ๊ทน์†Œ์ ์— ๊ฐ€๊น๊ฒŒ ๋„๋‹ฌํ•  ์ˆ˜ ์žˆ๋‹ค. +

+![](/posting_imgs/lab3-3.gif) +

+๋‹ค์Œ์œผ๋กœ ์˜ค๋ฅธ์ชฝ์—์„œ ์ ‘๊ทผํ•˜๋Š” ๊ฒฝ์šฐ ๊ธฐ์šธ๊ธฐ๊ฐ€ ์–‘์ˆ˜์ด๊ณ  ๊ทน์†Œ์ ์œผ๋กœ ๋„๋‹ฌํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” $W$๊ฐ€ ์ž‘์•„์ ธ์•ผ ํ•œ๋‹ค. ์ด ๋•Œ๋Š” ์–‘์ˆ˜์ธ ๊ธฐ์šธ๊ธฐ๋ฅผ ๋นผ์ฃผ์–ด ๊ทน์†Œ์ ์— ๊ฐ€๊น๊ฒŒ ๋„๋‹ฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +๊ฒฐ๊ตญ ์ด ๋‘˜๋‹ค ๋นผ์•ผํ•˜๋ฏ€๋กœ ๋ชจ๋‘ ๋งŒ์กฑํ•˜๋Š” ์‹์ด $ W := W - \alpha \nabla W $, ๊ธฐ์šธ๊ธฐ์˜ ๋บ„์…ˆ์œผ๋กœ ์ฃผ์–ด์ง€๋Š” ๊ฒƒ์ด๋‹ค. ์ด ๋•Œ $learning\,\,rate$์ธ $\alpha$๋Š” ๋ง ๊ทธ๋Œ€๋กœ ํ•™์Šต๋ฅ (ํ•œ ๋ฒˆ์— ํ•™์Šต์„ ์–ผ๋งˆ๋‚˜ ํ•  ๊ฒƒ์ธ๊ฐ€)์„ ๋‚˜ํƒ€๋‚ด๋Š” ๊ฒƒ์ด๋ฏ€๋กœ ์ƒํ™ฉ์— ๋งž๊ฒŒ ์ตœ์ ํ™” ํ•˜์—ฌ ์‚ฌ์šฉํ•œ๋‹ค. + +๋‹ค๋งŒ, ํ•™์Šต๋ฅ ์ด ๋„ˆ๋ฌด ์ž‘์œผ๋ฉด ์ˆ˜๋ ด์ด ๋Šฆ์–ด์ง€๊ณ , ๋„ˆ๋ฌด ํฌ๋ฉด ์ง„๋™ํ•˜๋ฉด ๋ฐœ์‚ฐํ•ด ๋ฒ„๋ฆฌ๊ธฐ ๋•Œ๋ฌธ์— ์ ์ ˆํ•œ ๋ฒ”์œ„์˜ ๊ฐ’์„ ์‚ฌ์šฉํ•ด์•ผ ํ•œ๋‹ค. + +![](/posting_imgs/lab3-4.jpg) + +์ด์–ด์„œ ์•ž์„  ์‹๋“ค์„ ์ฝ”๋“œ๋กœ ํ‘œํ˜„ํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +\\[ \nabla W = \frac{\partial cost}{\partial W} = \frac{2}{m} \sum^m_{i=1} \left( Wx^{(i)} - y^{(i)} \right)x^{(i)} \\] + +{% highlight python %} +gradient = torch.sum((W * x_train - y_train) * x_train) +print(gradient) + +''' output +tensor(-14.) +''' +{% endhighlight %} + +\\[ W := W - \alpha \nabla W \,\,\left(\alpha = learning\,\,rate\right)\\] + +{% highlight python %} +lr = 0.1 +W -= lr * gradient +print(W) + +''' output +tensor(1.4000) +''' +{% endhighlight %} + +*** +## Training +์•ž์„œ ๊ตฌํ˜„ํ–ˆ๋˜ ๊ฒƒ๋“ค์„ ํ™œ์šฉํ•˜์—ฌ ์‹ค์ œ๋กœ ํ•™์Šตํ•˜๋Š” ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•ด ๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +{% highlight python %} +# ๋ฐ์ดํ„ฐ +x_train = torch.FloatTensor([[1], [2], [3]]) +y_train = torch.FloatTensor([[1], [2], [3]]) +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +W = torch.zeros(1) +# learning rate ์„ค์ • +lr = 0.1 + +nb_epochs = 10 +for epoch in range(nb_epochs + 1): + + # H(x) ๊ณ„์‚ฐ + hypothesis = x_train * W + + # cost gradient ๊ณ„์‚ฐ + cost = torch.mean((hypothesis - y_train) ** 2) + gradient = torch.sum((W * x_train - y_train) * x_train) + + print('Epoch {:4d}/{} W: {:.3f}, Cost: {:.6f}'.format( + epoch, nb_epochs, W.item(), cost.item() + )) + + # gradient๋กœ H(x) ๊ฐœ์„  + W -= lr * gradient + +''' output +Epoch 0/10 W: 0.000, Cost: 4.666667 +Epoch 1/10 W: 1.400, Cost: 0.746666 +Epoch 2/10 W: 0.840, Cost: 0.119467 +Epoch 3/10 W: 1.064, Cost: 0.019115 +Epoch 4/10 W: 0.974, Cost: 0.003058 +Epoch 5/10 W: 1.010, Cost: 0.000489 +Epoch 6/10 W: 0.996, Cost: 0.000078 +Epoch 7/10 W: 1.002, Cost: 0.000013 +Epoch 8/10 W: 0.999, Cost: 0.000002 +Epoch 9/10 W: 1.000, Cost: 0.000000 +Epoch 10/10 W: 1.000, Cost: 0.000000 +''' +{% endhighlight %} + +**Hypothesis output ๊ณ„์‚ฐ -> cost์™€ gradient ๊ณ„์‚ฐ -> gradient๋กœ hypothesis(weight) ๊ฐœ์„ ** + +์œ„์™€ ๊ฐ™์€ ์ˆœ์„œ๋กœ ์ด 10 epoch ํ•™์Šตํ•˜๋Š” ์ฝ”๋“œ์ด๋‹ค. ํ•™์Šต์„ ํ•œ๋ฒˆ ํ•  ๋•Œ๋งˆ๋‹ค cost๊ฐ€ ์ค„์–ด๋“ค๊ณ , ์šฐ๋ฆฌ๊ฐ€ ์ƒ๊ฐํ•œ ์ด์ƒ์ ์ธ $W$์ธ 1์— ์ ์  ๊ฐ€๊นŒ์›Œ์ง€๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +*** +## Training with `optim` + +**Training**์—์„œ ํ–ˆ๋˜ ๊ฒƒ์ฒ˜๋Ÿผ ์šฐ๋ฆฌ๊ฐ€ gradient๋ฅผ ๊ณ„์‚ฐํ•˜๋Š” ์ฝ”๋“œ๋ฅผ ์ง์ ‘ ์ž‘์„ฑํ•˜์—ฌ ์‚ฌ์šฉํ•  ์ˆ˜๋„ ์žˆ์ง€๋งŒ PyTorch์—์„œ ์ œ๊ณตํ•˜๋Š” `optim`์„ ์ด์šฉํ•˜์—ฌ ๊ฐ„๋‹จํ•˜๊ฒŒ ๊ตฌํ˜„ํ•  ์ˆ˜๋„ ์žˆ๋‹ค. 
+ +{% highlight python %} +# ๋ฐ์ดํ„ฐ +x_train = torch.FloatTensor([[1], [2], [3]]) +y_train = torch.FloatTensor([[1], [2], [3]]) +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +W = torch.zeros(1, requires_grad=True) +# optimizer ์„ค์ • +optimizer = optim.SGD([W], lr=0.15) + +nb_epochs = 10 +for epoch in range(nb_epochs + 1): + + # H(x) ๊ณ„์‚ฐ + hypothesis = x_train * W + + # cost ๊ณ„์‚ฐ + cost = torch.mean((hypothesis - y_train) ** 2) + + print('Epoch {:4d}/{} W: {:.3f} Cost: {:.6f}'.format( + epoch, nb_epochs, W.item(), cost.item() + )) + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + +''' output +Epoch 0/10 W: 0.000 Cost: 4.666667 +Epoch 1/10 W: 1.400 Cost: 0.746667 +Epoch 2/10 W: 0.840 Cost: 0.119467 +Epoch 3/10 W: 1.064 Cost: 0.019115 +Epoch 4/10 W: 0.974 Cost: 0.003058 +Epoch 5/10 W: 1.010 Cost: 0.000489 +Epoch 6/10 W: 0.996 Cost: 0.000078 +Epoch 7/10 W: 1.002 Cost: 0.000013 +Epoch 8/10 W: 0.999 Cost: 0.000002 +Epoch 9/10 W: 1.000 Cost: 0.000000 +Epoch 10/10 W: 1.000 Cost: 0.000000 +''' +{% endhighlight %} + +`optim.SGD`๊ฐ€ ์šฐ๋ฆฌ๊ฐ€ ๋งŒ๋“ค์–ด์„œ ๊ตฌํ˜„ํ–ˆ๋˜ gradient์— ๋Œ€ํ•œ ์ฒ˜๋ฆฌ๋ฅผ ํ•ด์ฃผ๊ณ  ์žˆ๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +{% highlight python %} +optimizer.zero_grad() # gradient ์ดˆ๊ธฐํ™” +cost.backward() # gradient ๊ณ„์‚ฐ +optimizer.step() # ๊ณ„์‚ฐ๋œ gradient๋ฅผ ๋”ฐ๋ผ W, b๋ฅผ ๊ฐœ์„  +{% endhighlight %} +์ €๋ฒˆ ๊ฐ•์˜์—์„œ๋„ ๋“ฑ์žฅํ–ˆ๋˜ ์œ„ 3๊ฐœ์˜ ๋ฌถ์Œ ์ฝ”๋“œ๋ฅผ ํ†ตํ•ด gradient์— ๋Œ€ํ•œ ๊ณ„์‚ฐ๊ณผ ๊ทธ์— ๋”ฐ๋ฅธ ํ•™์Šต์ด ์ด๋ฃจ์–ด์ง€๊ณ  ์žˆ๋‹ค. + +์ด ๋•Œ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ $W$์™€ cost๋ฅผ ๋ณด๋ฉด ์ž˜ ํ•™์Šต์ด ๋˜๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-04-26-entropy.markdown b/_posts/2022-04-26-entropy.markdown new file mode 100644 index 00000000000..ccafd71aa54 --- /dev/null +++ b/_posts/2022-04-26-entropy.markdown @@ -0,0 +1,60 @@ +--- +title: "Entropy(์—”ํŠธ๋กœํ”ผ)๋ž€?" +author: Kwon +date: 2022-04-26T14:40:00+0900 +categories: [background] +tags: [entropy] +math: true +mermaid: false +--- + +### Related Post + +1. **Entropy** +2. [KL Divergence](/posts/KL-divergence/) +3. [Cross Entropy](/posts/cross-entropy/) + +*** +์˜คํ† ์ธ์ฝ”๋”์— ๋Œ€ํ•œ ๊ณต๋ถ€๋ฅผ ํ•˜๋‹ค๊ฐ€ entropy, cross entropy, KL divergence ๋“ฑ ๋ชจ๋ฅด๋Š” ์šฉ์–ด๋“ค์ด ์žˆ์–ด์„œ ๋” ์ด์ƒ ๋ฏธ๋ฃจ์ง€ ์•Š๊ณ  ๊ณต๋ถ€๋ฅผ ํ•ด ๋ณด๊ธฐ๋กœ ํ–ˆ๋‹ค. + +*** +## Entropy + +์—”ํŠธ๋กœํ”ผ์˜ ์ •์˜๋Š” ์ผ๋ฐ˜์ ์ธ ์šฉ์–ด๋ƒ, ๊ณผํ•™ ์šฉ์–ด๋ƒ ๋“ฑ์— ๋”ฐ๋ผ ๋‹ฌ๋ผ์งˆ ์ˆ˜ ์žˆ์ง€๋งŒ ์ด๋ฒˆ์— ๋‹ค๋ค„ ๋ณผ ์—”ํŠธ๋กœํ”ผ๋Š” ์ •๋ณด ์—”ํŠธ๋กœํ”ผ์ด๋‹ค. + +์ •๋ณด์ด๋ก ์—์„œ์˜ ์—”ํŠธ๋กœํ”ผ๋ž€ ๊ฐ ์ •๋ณด์— ํฌํ•จ๋œ ๊ธฐ๋Œ“๊ฐ’(ํ‰๊ท  ์ •๋ณด๋Ÿ‰), ์ •๋ณด์˜ ๋ถˆํ™•์‹ค๋Ÿ‰๋“ฑ์œผ๋กœ ์„ค๋ช…ํ•  ์ˆ˜ ์žˆ์ง€๋งŒ ๋‚ด๊ฐ€ ์—ฌ๋Ÿฌ ์ž๋ฃŒ๋“ค์„ ์ฐพ์•„๋ณด๋ฉด์„œ ๋จธ์‹ ๋Ÿฌ๋‹์˜ ๊ด€์ ์—์„œ ๊ฐ€์žฅ ์™€๋‹ฟ์•˜๋˜ ์˜๋ฏธ๋Š” **์˜ˆ์ธกํ•˜๊ธฐ ์–ด๋ ค์šด ์ •๋„**์˜€๋‹ค. + +๊ทธ๋Ÿผ ์—”ํŠธ๋กœํ”ผ๊ฐ€ ๋ฌด์—‡์ด๊ธธ๋ž˜ ์ด๋ ‡๊ฒŒ ๋‹ค์–‘ํ•œ ์˜๋ฏธ๋กœ ํ•ด์„๋  ์ˆ˜ ์žˆ๋Š”์ง€, ๊ทธ๋ฆฌ๊ณ  ๋‚ด๊ฐ€ ์™œ **์˜ˆ์ธกํ•˜๊ธฐ ์–ด๋ ค์šด ์ •๋„**๋ฅผ ๊ฐ€์žฅ ์™€๋‹ฟ๊ฒŒ ๋А๋‚€ ๊ฒƒ์ธ์ง€ ํ•œ๋ฒˆ ์•Œ์•„๋ณด์ž. + +๋จผ์ € ์ •๋ณด๋Ÿ‰์ด๋ผ๋Š” ๊ฒƒ์„ ์•Œ์•„์•ผ ํ•œ๋‹ค. ์ •๋ณด๋Ÿ‰์€ ์ •๋ณด๋ฅผ ์ •๋Ÿ‰ํ™” ํ•œ ๊ฒƒ์œผ๋กœ ๊ทธ ์ •๋ณด์— ๋Œ€ํ•œ ๊ฐ€์น˜๋ฅผ ๋‚˜ํƒ€๋‚ธ๋‹ค๊ณ  ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด ์—ฌ๊ธฐ ๋‘๊ฐ€์ง€์˜ ์ •๋ณด๊ฐ€ ์žˆ๋‹ค. + +> ๋‹ค์ด์•„๋ชฌ๋“œ๋Š” ๋”ฑ๋”ฑํ•˜๋‹ค. + +> ๋‹ค์ด์•„๋ชฌ๋“œ๊ฐ€ ๊ฐ€์žฅ ๋งŽ์ด ๋ฌปํ˜€ ์žˆ๋Š” ๊ด‘์‚ฐ์€ ๋Ÿฌ์‹œ์•„์— ์žˆ๋‹ค. + +์œ„ ๋‘ ์ •๋ณด ์ค‘ ์–ด๋А ๊ฒƒ์ด ๋” ๊ฐ€์น˜๊ฐ€ ๋†’์€ ์ •๋ณด์ผ๊นŒ ํ•œ๋ฒˆ ์ƒ๊ฐํ•ด๋ณด์ž. 
๋‹ค์ด์•„๋ชฌ๋“œ๊ฐ€ ๋”ฑ๋”ฑํ•˜๋‹ค๋Š” ๊ฒƒ์€ ๋Œ€๋ถ€๋ถ„์˜ ์‚ฌ๋žŒ์ด ์•Œ๊ณ  ์žˆ๋Š” ์‚ฌ์‹ค์ด๋ฏ€๋กœ ์ •๋ณด๋กœ์„œ์˜ ๊ฐ€์น˜๊ฐ€ ๋‚ฎ๋‹ค๊ณ  ํ•  ์ˆ˜ ์žˆ๋‹ค. ํ•˜์ง€๋งŒ ๋‹ค์ด์•„๋ชฌ๋“œ๊ฐ€ ๊ฐ€์žฅ ๋งŽ์ด ๋ฌปํ˜€ ์žˆ๋Š” ๊ด‘์‚ฐ์ด ์–ด๋””์— ์žˆ๋Š”์ง€๋Š” ๋Œ€๋ถ€๋ถ„์˜ ์‚ฌ๋žŒ์ด ์•„๋Š” ์ •๋ณด๊ฐ€ ์•„๋‹ˆ๋ฏ€๋กœ ์ƒ๋Œ€์ ์œผ๋กœ ๊ฐ€์น˜๊ฐ€ ๋†’๋‹ค๊ณ  ํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ฆ‰, **์ •๋ณด๋Ÿ‰**์€ +* ๋‹น์—ฐํ•œ ์‚ฌ์‹ค์ผ์ˆ˜๋ก ๋‚ฎ๋‹ค. +* ๊ทธ ์ •๋ณด๊ฐ€ ๋ถˆํ™•์‹คํ• ์ˆ˜๋ก ๋†’๋‹ค. +* ์™„์ „ํžˆ ์˜ˆ์ธก ๊ฐ€๋Šฅํ•˜๋ฉด ๊ฐ€์น˜๊ฐ€ ์—†๋‹ค. + +๊ฒฐ๊ตญ, ๋ฐœ์ƒ ํ™•๋ฅ ์ด ์ ์„์ˆ˜๋ก ์ •๋ณด์˜ ๊ฐ€์น˜๋Š” ์˜ฌ๋ผ๊ฐ€๊ฒŒ ๋œ๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ์ •๋ณด๋Ÿ‰์„ ์ •์˜ํ•  ๋•Œ์—๋Š” ํ™•๋ฅ ๊ณผ ๋ฐ˜๋น„๋ก€์˜ ๊ด€๊ณ„๋ฅผ ๊ฐ€์ง€๋„๋ก ํ•˜๋Š” ๊ฒƒ์ด ์ข‹์„ ๊ฒƒ์ด๋‹ค. +์ด๋Ÿฌํ•œ ์ด์œ ๋กœ ์ •๋ณด๋Ÿ‰์€ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ •์˜๋œ๋‹ค. + +\\[ I\left(x\right) = -\log_b{ P\left( x\right ) } \\] + +์–ด๋–ค ์‚ฌ๊ฑด์ด ์ผ์–ด๋‚œ ํ™•๋ฅ ์˜ ์—ญ์ˆ˜์— ๋กœ๊ทธ๋ฅผ ์ทจํ•œ ํ˜•ํƒœ์ด๋‹ค. ์—ฌ๊ธฐ์„œ ๋กœ๊ทธ๋ฅผ ์ทจํ•œ ์ด์œ ๋Š” ๋‘ ์‚ฌ๊ฑด์˜ ์ •๋ณด๋Ÿ‰์„ ํ•ฉ์นœ ๊ฒƒ์ด ๊ฐ ์‚ฌ๊ฑด์˜ ์ •๋ณด๋Ÿ‰์„ ๋”ํ•œ ๊ฒƒ๊ณผ ๊ฐ™์•„์•ผ ํ•˜๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค (๋กœ๊ทธ๋Š” ๊ณฑ์„ ํ•ฉ์œผ๋กœ ๋ฐ”๊ฟ€ ์ˆ˜ ์žˆ๋‹ค). +์ด ๋•Œ ๋ฐ‘ $b$๋Š” ์‚ฌ์šฉ์ฒ˜์— ๋”ฐ๋ผ 2, 10, $e$๋ฅผ ๋งŽ์ด ์‚ฌ์šฉํ•œ๋‹ค. + +์ •๋ณด๋Ÿ‰๊นŒ์ง€ ์ •์˜๋ฅผ ํ–ˆ์œผ๋‹ˆ ์—”ํŠธ๋กœํ”ผ๋ฅผ ์ •์˜ํ•  ์ฐจ๋ก€์ด๋‹ค. ์•ž์„œ ์—”ํŠธ๋กœํ”ผ๋Š” ์ •๋ณด๋Ÿ‰์˜ ๊ธฐ๋Œ“๊ฐ’์ด๋ผ๊ณ  ํ•˜์˜€๋‹ค. ์ฆ‰ ๊ฐ ์‚ฌ๊ฑด์— ๋Œ€ํ•œ ํ™•๋ฅ ๊ณผ ์ •๋ณด๋Ÿ‰์˜ ๊ณฑ์„ ๋ชจ๋‘ ํ•ฉํ•œ ๊ฒƒ์œผ๋กœ ์ •์˜ํ•  ์ˆ˜ ์žˆ๋‹ค. + +\\[ H\left(x\right) = E\left( I\left ( x\right ) \right) = -\sum^n_{i=1} p\left( x_i \right) \log_b{ p\left( x_i\right ) } \\] + +์—”ํŠธ๋กœํ”ผ๋Š” ์ •๋ณด๋Ÿ‰์ด ํด์ˆ˜๋ก ์ปค์ง€๋Š” ์„ฑ์งˆ์„ ๊ฐ€์ง„๋‹ค. ๋‹ค๋ฅธ ์ •๋ณด์ด๋ก ์—์„œ๋Š” ๋ชจ๋ฅด๊ฒ ์ง€๋งŒ ๋จธ์‹ ๋Ÿฌ๋‹ ๋ถ„์•ผ์—์„œ๋Š” ์˜ˆ์ธก์„ ํ•˜๋Š” ๊ฒƒ์ด ๋ชฉ์ ์ด๋ฏ€๋กœ ์˜ˆ์ธก์— ๋งž์ถฐ ๋‹ค์‹œ ๋ง์„ ์ •๋ฆฌํ•˜๋ฉด, **์˜ˆ์ธก์ด ํž˜๋“ค์ˆ˜๋ก ๋” ํฌ๋‹ค**๋Š” ๊ฒƒ์ด ๋œ๋‹ค. + +์ด๊ฒƒ์ด ์•ž์„œ **์˜ˆ์ธกํ•˜๊ธฐ ์–ด๋ ค์šด ์ •๋„**๊ฐ€ ๊ฐ€์žฅ ์™€๋‹ฟ์•˜๋‹ค๊ณ  ํ•˜๋Š” ์ด์œ ์˜€๋‹ค. + +๋‹ค์Œ ํฌ์ŠคํŒ…์—์„œ๋Š” ์ด ์—”ํŠธ๋กœํ”ผ๋ฅผ ์ด์šฉํ•œ ๋Œ€ํ‘œ์ ์ธ loss์ธ cross entropy๋ฅผ ์•Œ์•„๋ณด๊ธฐ ์œ„ํ•ด KL divergence์— ๋Œ€ํ•ด ๋จผ์ € ์•Œ์•„๋ณด๋ ค ํ•œ๋‹ค. diff --git a/_posts/2022-04-27-KL-divergence.markdown b/_posts/2022-04-27-KL-divergence.markdown new file mode 100644 index 00000000000..bd784438480 --- /dev/null +++ b/_posts/2022-04-27-KL-divergence.markdown @@ -0,0 +1,58 @@ +--- +title: "KL divergence(์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ)" +author: Kwon +date: 2022-04-27T00:00:00+0900 +categories: [background] +tags: [entropy, kl-divergence] +math: true +mermaid: false +--- + +### Related Post + +1. [Entropy](/posts/entropy/) +2. **KL Divergence** +3. [Cross Entropy](/posts/cross-entropy/) + +*** + +์ด๋ฒˆ์—๋Š” ์—”ํŠธ๋กœํ”ผ์— ์ด์–ด ์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ(Kullback-Leibler divergence)์— ๋Œ€ํ•ด ์•Œ์•„๋ณด๋ ค ํ•œ๋‹ค. + +*** +## Kullback-Leibler divergence, KLD + +์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ์€ ์–ด๋–ค ๋‘ ํ™•๋ฅ ๋ถ„ํฌ์˜ ์ฐจ์ด๋ฅผ ๊ณ„์‚ฐํ•˜๊ธฐ ์œ„ํ•œ ํ•จ์ˆ˜๋กœ, ๋‘ ํ™•๋ฅ ๋ถ„ํฌ์˜ [์ •๋ณด ์—”ํŠธ๋กœํ”ผ](/posts/entropy/)์˜ ์ฐจ์ด๋ฅผ ๊ณ„์‚ฐํ•œ๋‹ค. + +์˜๋ฏธ์ ์œผ๋กœ๋Š” ํ™•๋ฅ ๋ถ„ํฌ $P$๊ฐ€ ์žˆ์„ ๋•Œ ๊ทธ ๋ถ„ํฌ๋ฅผ ๊ทผ์‚ฌ์ ์œผ๋กœ ํ‘œํ˜„ํ•˜๋Š” $Q$๋ฅผ $P$ ๋Œ€์‹  ์ด์šฉํ•˜์—ฌ ์ƒ˜ํ”Œ๋ง ํ•  ๊ฒฝ์šฐ ์—”ํŠธ๋กœํ”ผ์˜ ๋ณ€ํ™”๋ฅผ ์˜๋ฏธํ•œ๋‹ค. + +์‹œ๊ฐ์ ์œผ๋กœ๋„ ํ•œ๋ฒˆ ํ™•์ธํ•ด ๋ณด์ž. +์—ฌ๊ธฐ ํ™•๋ฅ ๋ถ„ํฌ $P$(ํŒŒ๋ž€์ƒ‰)์™€ $Q$(์ดˆ๋ก์ƒ‰)๊ฐ€ ์žˆ๋‹ค. ๊ทธ๋ฆฌ๊ณ  ๋‘ ํ™•๋ฅ ๋ถ„ํฌ์— ๋Œ€ํ•œ ์—”ํŠธ๋กœํ”ผ ์ฐจ์ด๋ฅผ ๊ณ„์‚ฐํ•˜์—ฌ ํ‘œํ˜„ํ•œ ๊ณก์„ (๋นจ๊ฐ„์ƒ‰)์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. 
+ +![](/posting_imgs/kl-1.jpg) + +๊ทธ๋ž˜ํ”„์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋“ฏ์ด (๋‹น์—ฐํ•˜๊ฒŒ๋„) ๋ถ„ํฌ๊ฐ„์˜ ์ฐจ์ด๊ฐ€ ํฐ ๊ณณ์—์„œ๋Š” ์—”ํŠธ๋กœํ”ผ์˜ ์ฐจ์ด๊ฐ€ ํฌ๊ณ  ์ฐจ์ด๊ฐ€ ์ ์€ ๊ณณ์—์„œ๋Š” ์ค„์–ด๋“ ๋‹ค. ์‹ฌ์ง€์–ด ๋‘ ํ™•๋ฅ ๋ถ„ํฌ์˜ ๊ต์ (์ ์„ )์—์„œ๋Š” ๊ฐ’์ด ๊ฐ™๊ธฐ ๋•Œ๋ฌธ์— ์—”ํŠธ๋กœํ”ผ์˜ ์ฐจ์ด๋„ 0์ด ๋˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ด์ œ ์ˆ˜์‹์œผ๋กœ๋„ ํ•œ๋ฒˆ ํ‘œํ˜„ํ•ด ๋ณด์ž. +$P$์˜ ์—”ํŠธ๋กœํ”ผ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๊ณ  + +\\[ H\left( P \right)= -\sum^n_{i=1} p\left( x_i \right) \log{ p\left( x_i\right ) } \\] + +$P$๋Œ€์‹  $Q$๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ƒ˜ํ”Œ๋งํ•  ๊ฒฝ์šฐ ์—”ํŠธ๋กœํ”ผ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ •์˜๋œ๋‹ค. ($Q$์˜ ์ •๋ณด๋Ÿ‰์— ๋Œ€ํ•œ ๊ธฐ๋Œ“๊ฐ’) + +\\[ H\left( P, Q \right)= -\sum^n_{i=1} p\left( x_i \right) \log{ q\left( x_i\right ) } \\] + +์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ๋Š” ์ด๋“ค์˜ ์ฐจ์ด๋ผ๊ณ  ํ–ˆ์œผ๋ฏ€๋กœ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ •์˜ํ•  ์ˆ˜ ์žˆ๋‹ค. + +$ KL\left( P\|\|Q \right) = H\left( P, Q \right) - H\left( P \right) $ +$ = \left( -\sum^n_{i=1} p\left( x_i \right) \log{ q\left( x_i\right ) } \right) - \left( -\sum^n_{i=1} p\left( x_i \right) \log{ p\left( x_i\right ) } \right) $ +\\[ = -\sum^n_{i=1} p\left( x_i \right) \log{ \frac{ q\left( x_i\right )}{ p\left( x_i\right ) } } \\] + +์ด๋•Œ ์ด์‚ฐํ™•๋ฅ ๋ถ„ํฌ์˜ ์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ์€ ์œ„์—์„œ ํ‘œํ˜„ํ•œ ๊ฒƒ๊ณผ ๊ฐ™์ด ์ด ํ•ฉ์œผ๋กœ ๋‚˜ํƒ€๋‚ผ ์ˆ˜ ์žˆ์œผ๋ฉฐ ๋‹ค์Œ๊ณผ ๊ฐ™๊ณ  + +\\[ KL\left( P\|\|Q \right) = \sum_i P\left( i \right) \log{ \frac{ P\left( i \right )}{ Q\left( i \right ) } } \\] + +์—ฐ์†ํ™•๋ฅ ๋ถ„ํฌ์˜ ์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ์€ ์ ๋ถ„ ๊ฐ’์œผ๋กœ ์ฃผ์–ด์ง„๋‹ค. (์ด๋•Œ $p, q$๋Š” ๊ฐ ํ™•๋ฅ ๋ถ„ํฌ์˜ ํ™•๋ฅ ๋ฐ€๋„ ํ•จ์ˆ˜์ด๋‹ค.) + +\\[ KL\left( P\|\|Q \right) = \int^\infty_{-\infty} p\left( x \right) \log{ \frac{ p\left( x \right )}{ q\left( x \right ) } }dx \\] + +์—”ํŠธ๋กœํ”ผ์™€ ์ฟจ๋ฐฑ๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ์„ ์•Œ์•„๋ดค์œผ๋‹ˆ ์ด์–ด์„œ ๋‹ค์Œ ํฌ์ŠคํŒ…์€ cross entropy ๋‚ด์šฉ์— ๋Œ€ํ•ด ์•Œ์•„๋ณด๋„๋ก ํ•˜๊ฒ ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-04-28-cross-entropy.markdown b/_posts/2022-04-28-cross-entropy.markdown new file mode 100644 index 00000000000..523948d08e1 --- /dev/null +++ b/_posts/2022-04-28-cross-entropy.markdown @@ -0,0 +1,40 @@ +--- +title: "Cross Entropy(๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ)" +author: Kwon +date: 2022-04-28T00:10:00+0900 +categories: [background, loss] +tags: [entropy, kl-divergence, cross-entropy] +math: true +mermaid: false +--- + +### Related Post + +1. [Entropy](/posts/entropy/) +2. [KL Divergence](/posts/KL-divergence/) +3. **Cross Entropy** + +*** +## Cross Entropy + +๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ์˜ ์˜๋ฏธ๋Š” ์ด๋ฆ„์—์„œ ์ฐพ์•„๋ณผ ์ˆ˜ ์žˆ๋‹ค. ๋จผ์ € ๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ์˜ ์‹์„ ํ•œ๋ฒˆ ๋ณด์ž. + +\\[ H\left( P, Q \right) = -\sum^n_{i=1} p\left( x_i \right) \log{ q\left( x_i\right ) } \\] + +์—”ํŠธ๋กœํ”ผ ์‹์— $P$์™€ $Q$์˜ ๋ฐ€๋„ํ•จ์ˆ˜๋“ค์ด **๊ต์ฐจ**ํ•ด์„œ ๋“ค์–ด๊ฐ€ ์žˆ๋‹ค. ๊ทธ๋Ÿฐ ์˜๋ฏธ์—์„œ ๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ๋ผ๋Š” ์ด๋ฆ„์ด ๋ถ™์€ ๊ฒƒ์ด๋‹ค. + +์ด ์‹์˜ ์˜๋ฏธ๋Š” [KL divergence ํฌ์ŠคํŒ…](/posts/KL-divergence/)์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋“ฏ์ด ํ™•๋ฅ ๋ถ„ํฌ $P$๋ฅผ ๊ทผ์‚ฌํ•˜๋Š” $Q$๋ฅผ $P$ ๋Œ€์‹  ์‚ฌ์šฉํ•˜์—ฌ ์ƒ˜ํ”Œ๋งํ–ˆ์„ ๋•Œ์˜ ์—”ํŠธ๋กœํ”ผ๋ฅผ ๋งํ•œ๋‹ค. + +๊ทธ๋Ÿฐ๋ฐ ์–ด๋–ป๊ฒŒ ์ด๋Ÿฐ ์˜๋ฏธ๋ฅผ ๊ฐ€์ง€๋Š” ๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ๊ฐ€ ๊ทธ ์ž์ฒด๋กœ loss์˜ ์—ญํ• ์„ ํ•  ์ˆ˜ ์žˆ๋Š” ๊ฑธ๊นŒ? ์‹ค์ œ๋กœ loss๋ฅผ ๊ตฌํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” ์›๋ž˜ ๋ถ„ํฌ์˜ ์—”ํŠธ๋กœํ”ผ์™€์˜ ์ฐจ์ด๋ฅผ ๊ตฌํ•ด์•ผ ํ•  ๊ฒƒ์ด๋‹ค. ์ด๋Š” ์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ์˜ ์˜๋ฏธ์™€ ๋™์ผํ•˜๋‹ค. +์ฟจ๋ฐฑ-๋ผ์ด๋ธ”๋Ÿฌ ๋ฐœ์‚ฐ์˜ ์‹์€ ๋‹ค์Œ๊ณผ ๊ฐ™์•˜๋‹ค. 
+ +\\[ KL\left( P\|\|Q \right) = H\left( P, Q \right) - H\left( P \right) \\] + +๋ถ„๋ช… ๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ ๋ง๊ณ ๋„ ์›๋ž˜ ํ™•๋ฅ ๋ถ„ํฌ์˜ ์—”ํŠธ๋กœํ”ผ์ธ $ H\left( P \right) $ ํ•ญ์ด ์กด์žฌํ•œ๋‹ค. ํ•˜์ง€๋งŒ ์šฐ๋ฆฌ๊ฐ€ loss๋ฅผ ์‚ฌ์šฉํ•˜๊ฒŒ ๋˜๋Š” ์‹ค์ œ classification problem์— ๋Œ€ํ•ด์„œ ํ•œ๋ฒˆ ์ƒ๊ฐํ•ด ๋ณด์ž. + +classification problem์—์„œ ์šฐ๋ฆฌ๋Š” ํ•œ data์— ๋Œ€ํ•ด ์ •๋‹ต์ด ์ฃผ์–ด์ ธ ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ๋‹ค. ์ด ๋ง์€ **์ด๋ฏธ ์ •ํ•ด์ ธ ์žˆ๋Š” ์ •๋‹ต์— ์†ํ•˜๋Š” ๊ฒƒ์ด ๋ถˆํ™•์‹คํ•˜์ง€ ์•Š๋‹ค**๋Š” ๋ง์ด๊ธฐ ๋•Œ๋ฌธ์— $P = 1$์ด๊ณ  ๊ทธ์— ๋”ฐ๋ผ $ H\left( P \right) = 0$์ด๋‹ค. +๊ทธ๋Ÿฌ๋ฏ€๋กœ $ H\left( P \right) $ ํ•ญ์„ ๋ฌด์‹œํ•˜๊ณ  ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ ์„ ์ˆ˜ ์žˆ๋‹ค. + +\\[ KL\left( P\|\|Q \right) = H\left( P, Q \right) = -\sum^n_{i=1} p\left( x_i \right) \log{ q\left( x_i\right ) } \\] + +์ด๋Ÿฐ ์ด์œ  ๋•Œ๋ฌธ์— ๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ ์ž์ฒด๊ฐ€ loss๋กœ ๊ธฐ๋Šฅํ•  ์ˆ˜ ์žˆ๋Š” ๊ฒƒ์ด๋‹ค. \ No newline at end of file diff --git a/_posts/2022-04-29-dlZeroToAll-PyTorch-4_1.markdown b/_posts/2022-04-29-dlZeroToAll-PyTorch-4_1.markdown new file mode 100644 index 00000000000..c4fed6ec508 --- /dev/null +++ b/_posts/2022-04-29-dlZeroToAll-PyTorch-4_1.markdown @@ -0,0 +1,274 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab4_1: Multivariate Linear Regression" +author: Kwon +date: 2022-04-29T14:00:00+0900 +categories: [pytorch, study] +tags: [multivariate-linear-regressoin] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab 4_1: Multivariate Linear Regression ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** +## Theoretical Overview +์ด์ „ ํฌ์ŠคํŒ…๊นŒ์ง€์˜ regression์€ ํ•˜๋‚˜์˜ ๋ณ€์ˆ˜๋ฅผ ํ†ตํ•ด ์˜ˆ์ธก์„ ํ•˜๋Š” ๊ฒƒ์ด์—ˆ๋‹ค. ํ•˜์ง€๋งŒ ์ง๊ด€์ ์œผ๋กœ ์ƒ๊ฐํ•ด ๋ณด๋”๋ผ๋„ ์—ฌ๋Ÿฌ ๋ณ€์ˆ˜๋ฅผ ๊ฐ€์ง€๊ณ  ๋” ๋งŽ์€ ์ •๋ณด๋ฅผ ํ†ตํ•ด ์˜ˆ์ธกํ•˜๋ฉด ๋” ์ข‹์€ ๊ฒฐ๊ณผ๊ฐ€ ๋‚˜์˜ฌ ๊ฒƒ ๊ฐ™๋‹ค. +**Multivariate Linear Regression**์€ ์ด์ฒ˜๋Ÿผ ์—ฌ๋Ÿฌ ๋ณ€์ˆ˜๋ฅผ input์œผ๋กœ ์‚ฌ์šฉํ•˜์—ฌ ์˜ˆ์ธกํ•˜๋Š” ๊ฒƒ์„ ๋งํ•œ๋‹ค. + +Hypothesis์™€ Cost๋Š” ์•ž์—์„œ ์‚ฌ์šฉํ–ˆ๋˜ ๊ฒƒ๋“ค์„ ์‚ฌ์šฉํ•˜๋˜ ๋ณ€์ˆ˜๊ฐ€ ๋Š˜์–ด๋‚œ ๊ฒƒ์— ๋งž์ถฐ ์ˆ˜์ •ํ•ด์•ผ ํ•œ๋‹ค. ์ˆ˜์ •ํ•ด์„œ ๋‚˜์˜จ hypothesis์™€ cost๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +\\[ H(x_1, x_2, x_3) = x_1w_1 + x_2w_2 + x_3w_3 + b \\] +\\[ cost(W, b) = \frac{1}{m} \sum^m_{i=1} \left( H(x^{(i)}) - y^{(i)} \right)^2 \\] + +๋ณ€์ˆ˜์˜ ์ˆ˜์— ๋”ฐ๋ผ hypothesis๊ฐ€ ๋ณ€ํ•œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์ด๋•Œ ๊ฐ๊ฐ์˜ ๋ณ€์ˆ˜์— ๋Œ€ํ•ด $W$๊ฐ€ ๋”ฐ๋กœ ์ฃผ์–ด์ง„๋‹ค. + +*** +## Import +{% highlight python %} +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +{% endhighlight %} + +*** +## Data and Hypothesis +### Data +๋ฐ์ดํ„ฐ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด 3๊ฐœ์˜ ํ€ด์ฆˆ ์ ์ˆ˜(x)์™€ ์ตœ์ข… ์‹œํ—˜ ์ ์ˆ˜(y)๋ฅผ ํฌํ•จํ•˜๋Š” ์…‹์„ ์‚ฌ์šฉํ•  ๊ฒƒ์ด๋‹ค. + +| Quiz 1 (x1) | Quiz 2 (x2) | Quiz 3 (x3) | Final (y) | +|:-----------:|:-----------:|:-----------:|:---------:| +| 73 | 80 | 75 | 152 | +| 93 | 88 | 93 | 185 | +| 89 | 91 | 80 | 180 | +| 96 | 98 | 100 | 196 | +| 73 | 66 | 70 | 142 | +{:.inner-borders} + +{% highlight python %} +x1_train = torch.FloatTnsor([[73], [93], [89], [96], [73]]) +x2_train = torch.FloatTensor([[80], [88], [91], [98], [66]]) +x3_train = torch.FloatTensor([[75], [93], [90], [100], [70]]) + +y_train = torch.FloatTensor([[152], [185], [180], [196], [142]]) +{% endhighlight %} +
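์ฐธ๊ณ ๋กœ ์œ„์—์„œ ๋งŒ๋“  ํ…์„œ๋“ค์ด ์–ด๋–ค ๋ชจ์–‘์ธ์ง€ ๊ฐ„๋‹จํžˆ ํ™•์ธํ•ด ๋ณผ ์ˆ˜ ์žˆ๋‹ค. ์•„๋ž˜๋Š” ์œ„์˜ `x1_train`, `x2_train`, `x3_train`์ด ๊ทธ๋Œ€๋กœ ์ •์˜๋˜์–ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์Šค์ผ€์น˜์ด๋‹ค.

{% highlight python %}
# ์œ„์—์„œ ์ •์˜ํ•œ x1_train, x2_train, x3_train์ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์ฝ”๋“œ
print(x1_train.shape)  # torch.Size([5, 1]) - ๋ณ€์ˆ˜ ํ•˜๋‚˜๋‹น (์ƒ˜ํ”Œ ์ˆ˜, 1) ํฌ๊ธฐ์˜ ์—ด๋ฒกํ„ฐ

# ์„ธ ์—ด๋ฒกํ„ฐ๋ฅผ ์˜†์œผ๋กœ ๋ถ™์ด๋ฉด ๋’ค์—์„œ ํ–‰๋ ฌ๊ณฑ์— ์‚ฌ์šฉํ•˜๋Š” (5, 3) ํ˜•ํƒœ๊ฐ€ ๋œ๋‹ค
x_cat = torch.cat([x1_train, x2_train, x3_train], dim=1)
print(x_cat.shape)     # torch.Size([5, 3])
{% endhighlight %}
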
+### Hypothesis Function +์œ„ ๋ฐ์ดํ„ฐ์— ๋Œ€ํ•ด hypothesis๋ฅผ ์ฝ”๋“œ๋กœ ํ‘œํ˜„ํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์ด ํ‘œํ˜„ํ•  ์ˆ˜ ์žˆ๋‹ค. + +{% highlight python %} +hypothesis = x1_train * w1 + x2_train * w2 + x3_train * w3 + b +{% endhighlight %} + +ํ•˜์ง€๋งŒ input ๋ณ€์ˆ˜์˜ ๊ฐœ์ˆ˜๊ฐ€ 100๊ฐœ, 1000๊ฐœ ์ผ ๋•Œ๋„ ์œ„์™€ ๊ฐ™์ด hypothesis์™€ ๋ฐ์ดํ„ฐ์…‹์„ ์ •์˜ํ•  ์ˆ˜๋Š” ์—†๋Š” ๋…ธ๋ฆ‡์ด๋‹ค. ์ด๊ฑธ ๊ฐ„๋‹จํ•˜๊ฒŒ ๋งŒ๋“ค๊ธฐ ์œ„ํ•ด $x_1w_1 + x_2w_2 + x_3w_3$๋ฅผ ํ–‰๋ ฌ๊ณฑ์œผ๋กœ ๋ฐ”๊ฟ” ์ƒ๊ฐํ•ด ๋ณด์ž. + +๊ฐ ๋ณ€์ˆ˜์™€ ๊ฐ€์ค‘์น˜์˜ ๊ณฑ์€ ์•„๋ž˜์™€ ๊ฐ™์€ ํ–‰๋ ฌ ๊ณฑ๊ณผ ๊ฐ™๋‹ค. + +\\[ +\begin{pmatrix} +x_1 & x_2 & x_3 +\end{pmatrix} +\cdot +\begin{pmatrix} +w_1 \\\\\\ +w_2 \\\\\\ +w_3 \\\\\\ +\end{pmatrix} += +\begin{pmatrix} +x_1w_1 + x_2w_2 + x_3w_3 +\end{pmatrix} +\\] + +๊ทธ๋Ÿฌ๋ฏ€๋กœ [lab1](/posts/dlZeroToAll-PyTorch-1/)์—์„œ ๋‚˜์™”๋˜ `matmul()`์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ณ€์ˆ˜์™€ ๊ฐ€์ค‘์น˜์˜ ๊ณฑ์„ ๊ฐ„๋‹จํ•˜๊ฒŒ ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +{% highlight python %} +x_train = torch.FloatTensor([[73, 80, 75], + [93, 88, 93], + [89, 91, 90], + [96, 98, 100], + [73, 66, 70]]) +y_train = torch.FloatTensor([[152], [185], [180], [196], [142]]) + +hypothesis = x_train.matmul(W) # W์˜ ์ฐจ์›์€ [๋ณ€์ˆ˜์˜ ๊ฐœ์ˆ˜, 1]๋กœ ๋งž์ถฐ ์ค„ ๊ฒƒ +{% endhighlight %} + +์ด๋•Œ ์ฃผ์˜ํ•  ๊ฒƒ์€ ํ–‰๋ ฌ๊ณฑ ์—ฐ์‚ฐ์ด ๊ฐ€๋Šฅํ•˜๋„๋ก `W`์˜ ์ฐจ์›์„ ๋ณ€์ˆ˜์— ๊ฐœ์ˆ˜์— ๋”ฐ๋ผ ๋งž์ถฐ์ค˜์•ผ ํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + + +*** +## Train +Multivariate Linear Regression์˜ hypothesis๊นŒ์ง€ ๋ชจ๋‘ ์•Œ์•„๋ดค์œผ๋‹ˆ ํ•™์Šตํ•˜๋Š” ์ „์ฒด์ ์ธ ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•ด ๋ณด์ž. + +[lab3](/posts/dlZeroToAll-PyTorch-3/)์˜ **Train with `optim`**์˜ ์ฝ”๋“œ์™€ ํฌ๊ฒŒ ๋‹ค๋ฅด์ง€ ์•Š์ง€๋งŒ +`W = torch.zeros((3, 1), requires_grad=True)`์—์„œ ๊ฐ€์ค‘์น˜์˜ ์ฐจ์›์„ ๋ณ€์ˆ˜์˜ ๊ฐœ์ˆ˜์— ๋”ฐ๋ผ ๋งž์ถฐ์ค€ ๊ฒƒ๋งŒ ์ฐจ์ด๊ฐ€ ๋‚œ๋‹ค. + +{% highlight python %} +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +W = torch.zeros((3, 1), requires_grad=True) +b = torch.zeros(1, requires_grad=True) +# optimizer ์„ค์ • +optimizer = optim.SGD([W, b], lr=1e-5) + +nb_epochs = 20 +for epoch in range(nb_epochs + 1): + + # H(x) ๊ณ„์‚ฐ + hypothesis = x_train.matmul(W) + b # or .mm or @ + + # cost ๊ณ„์‚ฐ + cost = torch.mean((hypothesis - y_train) ** 2) + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # ๋กœ๊ทธ ์ถœ๋ ฅ + print('Epoch {:4d}/{} hypothesis: {} Cost: {:.6f}'.format( + epoch, nb_epochs, hypothesis.squeeze().detach(), cost.item() + )) + +'''output +Epoch 0/20 hypothesis: tensor([0., 0., 0., 0., 0.]) Cost: 29661.800781 +Epoch 1/20 hypothesis: tensor([67.2578, 80.8397, 79.6523, 86.7394, 61.6605]) Cost: 9298.520508 +Epoch 2/20 hypothesis: tensor([104.9128, 126.0990, 124.2466, 135.3015, 96.1821]) Cost: 2915.712402 +Epoch 3/20 hypothesis: tensor([125.9942, 151.4381, 149.2133, 162.4896, 115.5097]) Cost: 915.040649 +Epoch 4/20 hypothesis: tensor([137.7967, 165.6247, 163.1911, 177.7112, 126.3307]) Cost: 287.936157 +Epoch 5/20 hypothesis: tensor([144.4044, 173.5674, 171.0168, 186.2332, 132.3891]) Cost: 91.371010 +Epoch 6/20 hypothesis: tensor([148.1035, 178.0143, 175.3980, 191.0042, 135.7812]) Cost: 29.758249 +Epoch 7/20 hypothesis: tensor([150.1744, 180.5042, 177.8509, 193.6753, 137.6805]) Cost: 10.445281 +Epoch 8/20 hypothesis: tensor([151.3336, 181.8983, 179.2240, 195.1707, 138.7440]) Cost: 4.391237 +Epoch 9/20 hypothesis: tensor([151.9824, 182.6789, 179.9928, 196.0079, 139.3396]) Cost: 2.493121 +Epoch 10/20 hypothesis: tensor([152.3454, 183.1161, 180.4231, 196.4765, 139.6732]) Cost: 1.897688 +Epoch 11/20 hypothesis: tensor([152.5485, 183.3609, 180.6640, 196.7389, 139.8602]) Cost: 
1.710555 +Epoch 12/20 hypothesis: tensor([152.6620, 183.4982, 180.7988, 196.8857, 139.9651]) Cost: 1.651412 +Epoch 13/20 hypothesis: tensor([152.7253, 183.5752, 180.8742, 196.9678, 140.0240]) Cost: 1.632369 +Epoch 14/20 hypothesis: tensor([152.7606, 183.6184, 180.9164, 197.0138, 140.0571]) Cost: 1.625924 +Epoch 15/20 hypothesis: tensor([152.7802, 183.6427, 180.9399, 197.0395, 140.0759]) Cost: 1.623420 +Epoch 16/20 hypothesis: tensor([152.7909, 183.6565, 180.9530, 197.0538, 140.0865]) Cost: 1.622141 +Epoch 17/20 hypothesis: tensor([152.7968, 183.6643, 180.9603, 197.0618, 140.0927]) Cost: 1.621262 +Epoch 18/20 hypothesis: tensor([152.7999, 183.6688, 180.9644, 197.0661, 140.0963]) Cost: 1.620501 +Epoch 19/20 hypothesis: tensor([152.8014, 183.6715, 180.9665, 197.0686, 140.0985]) Cost: 1.619764 +Epoch 20/20 hypothesis: tensor([152.8020, 183.6731, 180.9677, 197.0699, 140.0999]) Cost: 1.619046 +''' +{% endhighlight %} + +hypothesis์˜ ์ถœ๋ ฅ๊ฐ’๊ณผ cost๋ฅผ ๋ณด๋ฉด ์ •๋‹ต์— ์ ์  ๊ทผ์ ‘ํ•˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์ด ๊ฒฝ์šฐ์—๋„ lab3์—์„œ ์–ธ๊ธ‰ํ•œ ๊ฒƒ๊ณผ ๊ฐ™์ด learning rate์— ๋”ฐ๋ผ cost์˜ ๊ทน์†Œ์ ์„ ์ง€๋‚˜์ณ ๋ฐœ์‚ฐํ•  ์ˆ˜๋„ ์žˆ๋‹ค. + +*** +## High-level Implementation with `nn.Module` +PyTorch์˜ `nn.Module`์„ ์ƒ์†ํ•˜์—ฌ ๋ชจ๋ธ์„ ์ƒ์„ฑํ•˜๋ฉด hypothesis ์ •์˜, cost ๊ณ„์‚ฐ ๋“ฑ์˜ ํ•™์Šต์— ํ•„์š”ํ•œ ์—ฌ๋Ÿฌ ์š”์†Œ๋“ค์„ ํŽธํ•˜๊ฒŒ ๋งŒ๋“ค๊ณ  ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Model +`nn.Module`์„ ์ด์šฉํ•˜์—ฌ ๋ชจ๋ธ์„ ์ƒ์„คํ•  ๋•Œ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์€ ํ˜•์‹์„ ๋”ฐ๋ผ์•ผ ํ•œ๋‹ค. + +1. `nn.Module`๋ฅผ ์ƒ์†ํ•ด์•ผ ํ•œ๋‹ค. +2. `__init__`๊ณผ `forward()`๋ฅผ override ํ•ด์•ผํ•œ๋‹ค. + * `__init__`: ๋ชจ๋ธ์— ์‚ฌ์šฉ๋  module์„ ์ •์˜. `super().__init__()`์œผ๋กœ `nn.Module`์˜ ์†์„ฑ์œผ๋กœ ์ดˆ๊ธฐํ™” + * `forward()`: **Hypothesis**๊ฐ€ ๋“ค์–ด๊ฐˆ ๊ณณ. (Hypothesis์— input์„ ๋„ฃ์–ด predictํ•˜๋Š” ๊ฒƒ์„ forward ์—ฐ์‚ฐ์ด๋ผ๊ณ  ํ•จ) + +{% highlight python %} +class LinearRegressionModel(nn.Module): + def __init__(self): + super().__init__() + self.linear = nn.Linear(1, 1) + + def forward(self, x): + return self.linear(x) +{% endhighlight %} + +์ด๋ ‡๊ฒŒ ๋ชจ๋ธ์„ ์ •์˜ํ•˜๋ฉด ์šฐ๋ฆฌ๊ฐ€ ์ง์ ‘ W, b๋ฅผ ๋ช…์‹œํ•˜์—ฌ hypothesis๋ฅผ ์ •์˜ํ•ด ์ฃผ์ง€ ์•Š์•„๋„ ๋œ๋‹ค. gradient ๊ณ„์‚ฐ์ธ `backward()`๋Š” PyTorch๊ฐ€ ์•Œ์•„์„œ ํ•ด์ฃผ๊ธฐ ๋•Œ๋ฌธ์— ์‹ ๊ฒฝ์“ฐ์ง€ ์•Š์•„๋„ ๋œ๋‹ค. + +{% highlight python %} +# ๊ธฐ์กด์˜ ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +W = torch.zeros((3, 1), requires_grad=True) +b = torch.zeros(1, requires_grad=True) + +hypothesis = x_train.matmul(W) + b + +# nn.Module์„ ํ™œ์šฉํ•œ ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +class MultivariateLinearRegressionModel(nn.Module): + def __init__(self): + super().__init__() + self.linear = nn.Linear(3, 1) + + def forward(self, x): + return self.linear(x) +{% endhighlight %} + +Multivariate Linear Regression Model์„ ์ƒ์„ฑํ•  ๋•Œ๋„ `nn.Linear()`์˜ ์ฐจ์›์„ ์กฐ์ ˆํ•ด ์ฃผ๋Š” ๊ฒƒ์œผ๋กœ ๊ฐ„๋‹จํžˆ ๋ชจ๋ธ์„ ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ๋‹ค. +์ด๋ ‡๊ฒŒ ๋ชจ๋ธ์˜ ์ˆ˜์ •์ด ์ž์œ ๋กœ์šด ๊ฒƒ์€ `nn.Module`์˜ ํ™•์žฅ์„ฑ์„ ์ž˜ ๋ณด์—ฌ์ฃผ๋Š” ๋ถ€๋ถ„์ด๋ผ๊ณ  ์ƒ๊ฐํ•œ๋‹ค. + +
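์ด๋ ‡๊ฒŒ ๋งŒ๋“  ๋ชจ๋ธ์ด ์–ด๋–ค parameter๋ฅผ ๋“ค๊ณ  ์žˆ๋Š”์ง€๋Š” `named_parameters()`๋กœ ๋ฐ”๋กœ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์•„๋ž˜๋Š” ์œ„์—์„œ ์ •์˜ํ•œ `MultivariateLinearRegressionModel`์„ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉํ•œ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ๊ฐ„๋‹จํ•œ ์˜ˆ์‹œ์ด๋‹ค.

{% highlight python %}
# ์œ„์—์„œ ์ •์˜ํ•œ MultivariateLinearRegressionModel์ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์ฝ”๋“œ
model = MultivariateLinearRegressionModel()

# nn.Linear(3, 1)์ด W์™€ b๋ฅผ ์•Œ์•„์„œ ๋งŒ๋“ค์–ด ๊ด€๋ฆฌํ•ด ์ค€๋‹ค
for name, param in model.named_parameters():
    print(name, param.shape)
# linear.weight torch.Size([1, 3])
# linear.bias torch.Size([1])
{% endhighlight %}
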
+ +### Cost (`nn.functional`) +cost๋„ `torch.nn.functional`๋ฅผ ์ด์šฉํ•ด์„œ ๊ฐ„๋‹จํžˆ ์ •์˜ํ•  ์ˆ˜ ์žˆ๋‹ค. + +{% highlight python %} +cost = F.mse_loss(prediction, y_train) +{% endhighlight %} + +๊ณ„์† ์‚ฌ์šฉํ•˜๋˜ MSE๋ฅผ ์œ„์™€ ๊ฐ™์ด ์ •์˜ํ•˜๋ฉด ์ถ”ํ›„์— ๋‹ค๋ฅธ loss๋ฅผ ์‚ฌ์šฉํ•˜๋ ค๊ณ  ํ•  ๋•Œ loss ํ•จ์ˆ˜๋งŒ ๋ณ€๊ฒฝํ•ด ์ฃผ๋ฉด ๋˜๊ธฐ ๋•Œ๋ฌธ์— ํ›จ์”ฌ ํŽธ๋ฆฌํ•˜๋‹ค. + +
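์˜ˆ๋ฅผ ๋“ค์–ด MSE ๋Œ€์‹  L1 loss๋ฅผ ์จ๋ณด๊ณ  ์‹ถ๋‹ค๋ฉด ์•„๋ž˜์ฒ˜๋Ÿผ ํ•œ ์ค„๋งŒ ๋ฐ”๊พธ๋ฉด ๋œ๋‹ค. `prediction`๊ณผ `y_train`์ด ์ด๋ฏธ ์ •์˜๋˜์–ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ์˜ˆ์‹œ์ด๋‹ค.

{% highlight python %}
# prediction๊ณผ y_train์ด ์ •์˜๋˜์–ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ์˜ˆ์‹œ
cost = F.mse_loss(prediction, y_train)  # ํ‰๊ท  ์ œ๊ณฑ ์˜ค์ฐจ(MSE)
cost = F.l1_loss(prediction, y_train)   # ํ‰๊ท  ์ ˆ๋Œ€ ์˜ค์ฐจ(MAE)๋กœ ๋ฐ”๊พธ๊ณ  ์‹ถ๋‹ค๋ฉด ์ด ์ค„๋งŒ ๊ต์ฒดํ•˜๋ฉด ๋œ๋‹ค
{% endhighlight %}
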
+ +### Training with `nn.Module` +์•ž์„œ ์ƒˆ๋กญ๊ฒŒ ์ •์˜ํ–ˆ๋˜ ๋ชจ๋ธ๊ณผ cost๋ฅผ ์ด์šฉํ•˜์—ฌ ์ „์ฒด ํ•™์Šต ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. +๋‚˜๋จธ์ง€ ๋ถ€๋ถ„์€ ํฌ๊ฒŒ ์ฐจ์ด๊ฐ€ ์—†๊ณ , hypothesis์™€ cost๋ฅผ ์ •์˜ํ•˜๋Š” ๋ถ€๋ถ„์ด ๋” ๊ฐ„๊ฒฐํ•ด์ง„ ๋ชจ์Šต์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +{% highlight python %} +# ๋ฐ์ดํ„ฐ +x_train = torch.FloatTensor([[73, 80, 75], + [93, 88, 93], + [89, 91, 90], + [96, 98, 100], + [73, 66, 70]]) +y_train = torch.FloatTensor([[152], [185], [180], [196], [142]]) +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +model = MultivariateLinearRegressionModel() +# optimizer ์„ค์ • +optimizer = optim.SGD(model.parameters(), lr=1e-5) + +nb_epochs = 20 +for epoch in range(nb_epochs+1): + + # H(x) ๊ณ„์‚ฐ + prediction = model(x_train) + + # cost ๊ณ„์‚ฐ + cost = F.mse_loss(prediction, y_train) + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # ๋กœ๊ทธ ์ถœ๋ ฅ + print('Epoch {:4d}/{} Cost: {:.6f}'.format( + epoch, nb_epochs, cost.item() + )) + +'''output +Epoch 0/20 Cost: 31667.599609 +Epoch 1/20 Cost: 9926.266602 +Epoch 2/20 Cost: 3111.514160 +Epoch 3/20 Cost: 975.451172 +Epoch 4/20 Cost: 305.908539 +Epoch 5/20 Cost: 96.042679 +Epoch 6/20 Cost: 30.260782 +Epoch 7/20 Cost: 9.641659 +Epoch 8/20 Cost: 3.178671 +Epoch 9/20 Cost: 1.152871 +Epoch 10/20 Cost: 0.517862 +Epoch 11/20 Cost: 0.318802 +Epoch 12/20 Cost: 0.256388 +Epoch 13/20 Cost: 0.236816 +Epoch 14/20 Cost: 0.230657 +Epoch 15/20 Cost: 0.228718 +Epoch 16/20 Cost: 0.228094 +Epoch 17/20 Cost: 0.227880 +Epoch 18/20 Cost: 0.227803 +Epoch 19/20 Cost: 0.227759 +Epoch 20/20 Cost: 0.227729 +''' +{% endhighlight %} diff --git a/_posts/2022-04-30-dlZeroToAll-PyTorch-4_2.markdown b/_posts/2022-04-30-dlZeroToAll-PyTorch-4_2.markdown new file mode 100644 index 00000000000..f38f9e97eb3 --- /dev/null +++ b/_posts/2022-04-30-dlZeroToAll-PyTorch-4_2.markdown @@ -0,0 +1,188 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab4_2: Loading Data" +author: Kwon +date: 2022-04-30T00:00:00 +0900 +categories: [pytorch, study] +tags: [multivariate-linear-regressoin] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab 4_2: Loading Data ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** +## Data in the Real World +[lab4_1](/posts/dlZeroToAll-PyTorch-4_1/)์—์„œ ๋‹ค๋ฃจ์—ˆ๋˜ Multivariate Linear Regression์—์„œ๋Š” ํ•™์Šต ๋ฐ์ดํ„ฐ๋กœ 3๊ฐœ์˜ ์ฐจ์›์„ ๊ฐ€์ง„ 5๊ฐœ์˜ ์ƒ˜ํ”Œ์„ ์‚ฌ์šฉํ–ˆ์—ˆ๋‹ค. +ํ•˜์ง€๋งŒ ์‹ค์ œ ์šฐ๋ฆฌ๊ฐ€ ๋‹ค๋ฃจ๋ ค๊ณ  ํ•˜๋Š” ๋ฐ์ดํ„ฐ๋Š” ๊ทธ ํฌ๊ธฐ๊ฐ€ ๊ทธ๋ฆฌ ๋งŒ๋งŒํ•˜์ง€ ์•Š๋‹ค. ๋ณธ ๊ฐ•์˜์—์„œ ์˜ˆ๋ฅผ ๋“ค์—ˆ๋˜ **ImageNet**์˜ ๊ฒฝ์šฐ 1400๋งŒ๊ฐœ ์ด์ƒ์„ ์ด๋ฏธ์ง€ ๋ฐ์ดํ„ฐ ์…‹์„ ํฌํ•จํ•˜๊ณ  ์žˆ์œผ๋ฉฐ ๊ทธ ์šฉ๋Ÿ‰์ด 120GB ์ด์ƒ์ด๋‹ค. +์ด ๋ฐ์ดํ„ฐ๋ฅผ ํ•œ๋ฒˆ์— ํ•™์Šต์„ ์ง„ํ–‰ํ•œ๋‹ค๋Š” ๊ฒƒ์€ ํ•˜๋“œ์›จ์–ด ์ƒ์˜ ์šฉ๋Ÿ‰ ๋ฌธ์ œ๋„ ์žˆ๊ฒ ์ง€๋งŒ, ํ•˜๋“œ์›จ์–ด์ ์ธ ๋ฌธ์ œ๊ฐ€ ํ•ด๊ฒฐ๋œ๋‹ค ํ•˜๋”๋ผ๋„ ๋А๋ฆฌ๊ณ  Gradient Descent ์‹œ์— cost ์—ฐ์‚ฐ์— ๋Œ€ํ•œ computing power ๋ถ€๋‹ด์ด ๋„ˆ๋ฌด ํฌ๋‹ค. + +์ด๋Ÿฐ ๋ฌธ์ œ์ ์„ ํ•ด๊ฒฐํ•˜๊ธฐ ์œ„ํ•ด ๋“ฑ์žฅํ•œ ๊ฒƒ์ด Minibatch์ด๋‹ค. ๋ฐ์ดํ„ฐ๊ฐ€ ๋„ˆ๋ฌด ํฌ๋‹ˆ๊นŒ **'๋ฐ์ดํ„ฐ์˜ ์ผ๋ถ€๋กœ ํ•™์Šตํ•˜๋ฉด ์–ด๋–จ๊นŒ?'**๋ผ๋Š” ์ƒ๊ฐ์— ๋‚˜์˜จ ๊ฐœ๋…์ด๋‹ค. + +*** +## Minibatch Gradient Descent +์•ž์—์„œ ์ž ๊ฐ„ ์–ธ๊ธ‰ํ•œ ๊ฒƒ์ฒ˜๋Ÿผ, ๋Œ€์šฉ๋Ÿ‰ ๋ฐ์ดํ„ฐ๋ฅผ ๊ท ์ผํ•˜๊ฒŒ ๋‚˜๋ˆ  gradient dscent๋ฅผ ํ•˜๋Š” ๊ฒƒ์„ Minibatch Gradient Descent๋ผ๊ณ  ํ•œ๋‹ค. +์ด๋ ‡๊ฒŒ ํ•™์Šต์„ ํ•˜๋Š” ๊ฒฝ์šฐ ์—…๋ฐ์ดํŠธ๊ฐ€ ๋น ๋ฅด๊ณ  ํ•˜๋“œ์›จ์–ด์ ์ธ ๋ถ€๋‹ด๋„ ๋œ ์ˆ˜ ์žˆ๋‹ค. 
+ํ•˜์ง€๋งŒ ์–ด๋””๊นŒ์ง€๋‚˜ ์ „์ฒด๊ฐ€ ์•„๋‹Œ ์ผ๋ถ€ ๋ฐ์ดํ„ฐ๋ฅผ ์จ์„œ ํ•™์Šตํ•˜๋Š” ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์— ์ž˜๋ชป๋œ ๋ฐฉํ–ฅ์œผ๋กœ ํ•™์Šต์ด ๋  ์ˆ˜๋„ ์žˆ๊ณ , +๊ฒฐ๊ณผ์ ์œผ๋กœ ์–ด๋А์ •๋„ ์ˆ˜๋ ดํ•˜๋”๋ผ๋„ ๊ทธ ๊ณผ์ •์ด ์•„๋ž˜ ๊ทธ๋ฆผ๊ณผ ๊ฐ™์ด ์ข€ ๊ฑฐ์น ๊ฒŒ Gradient descent ๋  ์ˆ˜๋„ ์žˆ๋‹ค. (์•„๋ž˜ ๊ทธ๋ฆผ์€ ์ดํ•ด๋ฅผ ์œ„ํ•œ ์ž„์˜์˜ ๊ทธ๋ž˜ํ”„์ด๋ฉฐ, ์‹ค์ œ loss์˜ ๊ทธ๋ž˜ํ”„๊ฐ€ ์•„๋‹™๋‹ˆ๋‹ค.) + +![Minibatch Gradient Decsent](/posting_imgs/lab4_2-1.png) + +![Gradient Decsent](/posting_imgs/lab4_2-2.png) + +*** +## Dataset +### PyTorch Dataset +`torch.utils.data.Dataset`์„ ์ƒ์†ํ•˜์—ฌ ์‚ฌ์šฉ์ž ์ •์˜ ๋ฐ์ดํ„ฐ ์…‹์„ ๋งŒ๋“ค์–ด ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. ์ด ๋ฐฉ์‹์œผ๋กœ ๋ฐ์ดํ„ฐ ์…‹์„ ์ •์˜ํ•  ๊ฒฝ์šฐ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ํ˜•์‹์„ ๋”ฐ๋ผ์•ผ ํ•œ๋‹ค. +1. `torch.utils.data.Dataset`์„ ์ƒ์†ํ•ด์•ผ ํ•œ๋‹ค. +2. `__init__`, `__len__` ๊ทธ๋ฆฌ๊ณ  `__getitem__`์„ override ํ•ด์•ผ ํ•œ๋‹ค. + * `__init__` : ๋ฐ์ดํ„ฐ ์…‹์„ ๋งŒ๋“ค ๋•Œ(`Dataset`๊ฐ์ฒด๊ฐ€ ์ƒ์„ฑ๋  ๋•Œ)๋งŒ ์‹คํ–‰๋จ. ์‹ค์ œ๋„ ๋ฐ์ดํ„ฐ๋ฅผ ์ •์˜ํ•˜๋Š” ๊ณณ. + * `__len__` : ๋ฐ์ดํ„ฐ ์…‹์˜ ์ด ๋ฐ์ดํ„ฐ ์ˆ˜๋ฅผ ๋ฐ˜ํ™˜ํ•จ. + * `__getitem__`: ์ฃผ์–ด์ง„ ์ธ๋ฑ์Šค(`idx`)์— ๋”ฐ๋ผ ๊ทธ ์ธ๋ฑ์Šค์— ๋งž๋Š” x, y ๋ฐ์ดํ„ฐ๋ฅผ ๋ฐ˜ํ™˜ํ•จ. + +lab4_1์—์„œ ์‚ฌ์šฉํ–ˆ๋˜ ๋ฐ์ดํ„ฐ๋กœ ๋ฐ์ดํ„ฐ ์…‹์„ ๋งŒ๋“œ๋Š” ๊ณผ์ •์€ ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +```python +from torch.utils.data import Dataset + +class CustomDataset(Dataset): + def __init__(self): + self.x_data = [[73, 80, 75], + [93, 88, 93], + [89, 91, 90], + [96, 98, 100], + [73, 66, 70]] + self.y_data = [[152], [185], [180], [196], [142]] + + def __len__(self): + return len(self.x_data) + + def __getitem__(self, idx): + return torch.FloatTensor(self.x_data[idx]), torch.FloatTensor(self.y_data[idx]) + +dataset = CustomDataset() +``` + +์›ํ•˜๋Š” ๋ฐ์ดํ„ฐ๋ฅผ ๋„ฃ์–ด ๋ฐ์ดํ„ฐ ์…‹ class๋ฅผ ๋งŒ๋“  ํ›„ ์ธ์Šคํ„ด์Šค๋ฅผ ์ƒ์„ฑํ•ด ์ค€๋‹ค. + +### PyTorch DataLoader +`torch.utils.data.DataLoader`๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด `Dataset` ๊ฐ์ฒด์˜ ๋ฐ์ดํ„ฐ์— ๋Œ€ํ•ด minibatch๋ฅผ ์ž๋™์œผ๋กœ ์ œ๊ณตํ•˜์—ฌ batch์— ๋Œ€ํ•œ ๋ฐ˜๋ณต์„ ์‰ฝ๊ฒŒ ํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•ด์ค€๋‹ค. + +```python +from torch.utils.data import DataLoader + +dataloader = DataLoader( + dataset, + batch_size=2, + shuffle=True +) +``` + +์œ„์™€ ๊ฐ™์ด ์„ค์ •ํ•˜๋ฉด ๊ฐ minibatch๋“ค์˜ ํฌ๊ธฐ๋ฅผ 2๋กœ ๋‚˜๋ˆ„์–ด ์ œ๊ณตํ•œ๋‹ค. `batch_size`๋Š” ํ†ต์ƒ์ ์œผ๋กœ 2์˜ ์ œ๊ณฑ์ˆ˜(16, 32, 64, 128, 256...)๋กœ ์„ค์ •ํ•˜๋Š”๋ฐ ์ด๋Š” GPU memory๊ฐ€ 2์˜ ๊ฑฐ๋“ญ์ œ๊ณฑ์ด๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +๊ฒฝ์šฐ์— ๋”ฐ๋ผ 66๊ฐœ์˜ batch๋‚˜ 120๊ฐœ์˜ batch๋‚˜ (๋‘˜ ๋‹ค $2^6$๊ณผ $2^7$ ์‚ฌ์ด์˜ ๋ฒ”์œ„์ด๋‹ค. ์˜ˆ๋ฅผ ๋“ค์ž๋ฉด ๊ทธ๋ ‡๋‹ค๋Š” ๊ฒƒ) ํ•„์š”ํ•œ ์‹œ๊ฐ„์ด ๋™์ผํ•œ ๊ฒฝ์šฐ๊ฐ€ ๋ฐœ์ƒํ•  ์ˆ˜ ์žˆ๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ์ตœ๋Œ€์˜ ํšจ์œจ์„ ์œ„ํ•ด GPU memory์— ํฌ๊ธฐ๋ฅผ ๋งž์ถฐ์ฃผ๋Š” ๊ฒƒ์ด๋‹ค. ๋˜ ํ•˜๋‚˜ ์ผ๋ฐ˜์ ์œผ๋กœ ์•Œ๋ ค์ง„ ๊ฒƒ์œผ๋กœ๋Š” batch size๊ฐ€ ์ž‘์„์ˆ˜๋ก ์‹œ๊ฐ„์€ ์˜ค๋ž˜ ๊ฑธ๋ฆฌ๊ณ , ํšจ๊ณผ๊ฐ€ ์ข‹๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +shuffle ์˜ต์…˜์€ ๋ฐ์ดํ„ฐ๋ฅผ epoch๋งˆ๋‹ค ์„ž์–ด์„œ ๋ชจ๋ธ์ด ๋ฐ์ดํ„ฐ์˜ ์ˆœ์„œ๋ฅผ ์™ธ์šฐ๋Š” ๊ฒƒ์„ ๋ฐฉ์ง€ํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•ด ์ค€๋‹ค. ๋Œ€๋ถ€๋ถ„์˜ ๊ฒฝ์šฐ ๋ฐ์ดํ„ฐ์˜ ์ˆœ์„œ๋ฅผ ์™ธ์šฐ์ง€ ๋ชปํ•˜๋„๋ก ํ•™์Šตํ•˜๋Š” ๊ฒƒ์ด ์ข‹์œผ๋ฏ€๋กœ Ture๋กœ ํ•ด ๋†“๋Š” ๊ฒƒ์ด ์ข‹๋‹ค. + +*** +## Full Code with `Dataset` and `DataLoader` +`Dataset` ๊ณผ `DataLoader`๋ฅผ ์ด์šฉํ•˜์—ฌ ํ•™์Šตํ•˜๋Š” ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•ด๋ณด์ž. 
+ +```python +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim + +class MultivariateLinearRegressionModel(nn.Module): + def __init__(self): + super().__init__() + self.model = nn.Linear(3, 1) + + def forward(self, x): + return self.model(x) + +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +model = MultivariateLinearRegressionModel() +# optimizer ์„ค์ • +optimizer = optim.SGD(model.parameters(), lr=1e-5) +``` + +๋ชจ๋ธ, optimizer๋Š” lab4_1๊ณผ ๋™์ผํ•˜๊ฒŒ ์ •์˜ํ•ด์ฃผ๊ณ  + +```python +from torch.utils.data import Dataset +from torch.utils.data import DataLoader + +class CustomDataset(Dataset): + def __init__(self): + self.x_data = [[73, 80, 75], + [93, 88, 93], + [89, 91, 90], + [96, 98, 100], + [73, 66, 70]] + self.y_data = [[152], [185], [180], [196], [142]] + + def __len__(self): + return len(self.x_data) + + def __getitem__(self, idx): + return torch.FloatTensor(self.x_data[idx]), torch.FloatTensor(self.y_data[idx]) + +dataset = CustomDataset() + +dataloader = DataLoader( + dataset, + batch_size=2, + shuffle=True +) +``` + +๋ฐ์ดํ„ฐ ์…‹์€ ์˜ค๋Š˜ ํฌ์ŠคํŒ…์— ๋‚˜์˜จ ๋ฐฉ๋ฒ•์œผ๋กœ ์ •์˜๋ฅผ ํ•ด์ค€๋‹ค. + +์ด๋“ค์„ ๊ฐ€์ง€๊ณ  ํ•™์Šตํ•˜๋Š” ์ฝ”๋“œ๋Š” ์ด๋ ‡๊ฒŒ ์ž‘์„ฑํ•  ์ˆ˜ ์žˆ๋‹ค. + +```python +epochs = 20 +for epoch in range(epochs + 1): + for batch_idx, smaples in enumerate(dataloader): + x_train, y_train = smaples + + prediction = model(x_train) + + cost = F.mse_loss(prediction, y_train) + + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # 5 epoch ๋งˆ๋‹ค ๋กœ๊ทธ ์ถœ๋ ฅ + if epoch%5 == 0: + print('Epoch {:4d}/{} Batch {}/{} Cost {:6f}'.format( + epoch, epochs, batch_idx, len(dataloader), cost.item() + )) + +'''output +Epoch 0/20 Batch 0/3 Cost 390.795288 +Epoch 0/20 Batch 1/3 Cost 55.004257 +Epoch 0/20 Batch 2/3 Cost 18.470478 +Epoch 5/20 Batch 0/3 Cost 8.500160 +Epoch 5/20 Batch 1/3 Cost 3.519112 +Epoch 5/20 Batch 2/3 Cost 2.370358 +Epoch 10/20 Batch 0/3 Cost 2.786815 +Epoch 10/20 Batch 1/3 Cost 4.166077 +Epoch 10/20 Batch 2/3 Cost 5.060166 +Epoch 15/20 Batch 0/3 Cost 4.609153 +Epoch 15/20 Batch 1/3 Cost 6.680350 +Epoch 15/20 Batch 2/3 Cost 4.476605 +Epoch 20/20 Batch 0/3 Cost 4.082047 +Epoch 20/20 Batch 1/3 Cost 2.758399 +Epoch 20/20 Batch 2/3 Cost 4.820268 +''' +``` + +ํ•œ epoch ์•ˆ์—์„œ minibatch๋กœ 3๋ฒˆ ํ•™์Šตํ•˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ๋‹ค๋งŒ ์•ž์„œ batch size๊ฐ€ ์ž‘์„์ˆ˜๋ก ์˜ค๋ž˜๊ฑธ๋ฆฌ๊ณ  ํšจ๊ณผ๋Š” ์ข‹๋‹ค๊ณ  ํ–ˆ๋Š”๋ฐ ์ด๋ฒˆ์˜ ๊ฒฝ์šฐ์—๋Š” ์„ฑ๋Šฅ์ด ๊ทธ๋ ‡๊ฒŒ ์ข‹์•„ ๋ณด์ด์ง€ ์•Š๋Š”๋‹ค(lab4_1์—์„œ์˜ ์ตœ์ข… cost๋Š” 0.227729). + +๊ทธ ์ด์œ ๋Š” batch size๊ฐ€ ๋„ˆ๋ฌด ์ž‘๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. ์šฐ๋ฆฌ๊ฐ€ minibatch๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํ•™์Šต์„ ํ•  ์ˆ˜ ์žˆ๋Š” ์ด์œ ๋Š” minibatch๊ฐ€ ์ „์ฒด ๋ฐ์ดํ„ฐ ์…‹์„ ๋Œ€ํ‘œํ•  ์ˆ˜ ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•˜๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. ํ•˜์ง€๋งŒ ์ด๋ฒˆ ์ƒํ™ฉ์„ ๋ณด๋ฉด batch size๊ฐ€ 2 ๋ฐ–์— ๋˜์ง€ ์•Š๋Š”๋‹ค. +์ด์ •๋„ ํฌ๊ธฐ๋กœ๋Š” ์•„๋ฌด๋ฆฌ ์ƒ๊ฐํ•ด๋„ ์ „์ฒด ๋ฐ์ดํ„ฐ์˜ ๋ถ„ํฌ๋ฅผ ๋Œ€ํ‘œํ•˜๊ธฐ์—๋Š” ๋ฌด๋ฆฌ๊ฐ€ ์žˆ์–ด ๋ณด์ธ๋‹ค. ์ด๋Ÿฐ ์ด์œ  ๋•Œ๋ฌธ์— ์„ฑ๋Šฅ์ด ๋–จ์–ด์ง„ ๊ฒƒ์ด๋ผ๊ณ  ํ•  ์ˆ˜ ์žˆ๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ batch size๋ฅผ ์„ค์ •ํ•  ๋•Œ์—๋Š” ์ถฉ๋ถ„ํžˆ ์ „์ฒด ๋ถ„ํฌ๋ฅผ ๋ฐ˜์˜ํ•  ์ˆ˜ ์žˆ๋„๋ก ์„ค์ •ํ•˜๋Š” ์ฃผ์˜๋ฅผ ๊ธฐ์šธ์—ฌ์•ผ ํ•œ๋‹ค. 
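batch size์— ๋”ฐ๋ผ ํ•œ epoch ๋™์•ˆ ๋ช‡ ๋ฒˆ ์—…๋ฐ์ดํŠธ๊ฐ€ ์ผ์–ด๋‚˜๋Š”์ง€๋Š” `len(dataloader)`๋กœ ๋ฐ”๋กœ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์•„๋ž˜๋Š” ์œ„์—์„œ ๋งŒ๋“  `dataset`(์ƒ˜ํ”Œ 5๊ฐœ)์„ ๊ทธ๋Œ€๋กœ ์“ด๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ๊ฐ„๋‹จํ•œ ์˜ˆ์‹œ์ด๋‹ค.

```python
# ์œ„์—์„œ ๋งŒ๋“  dataset(์ƒ˜ํ”Œ 5๊ฐœ)์ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์ฝ”๋“œ
for bs in [1, 2, 5]:
    loader = DataLoader(dataset, batch_size=bs, shuffle=True)
    print(bs, len(loader))  # batch size๊ฐ€ ์ปค์งˆ์ˆ˜๋ก ํ•œ epoch ๋‹น ์—…๋ฐ์ดํŠธ ํšŸ์ˆ˜๋Š” ์ค„์–ด๋“ ๋‹ค

'''output
1 5
2 3
5 1
'''
```

์ฐธ๊ณ ๋กœ `drop_last=True`๋ฅผ ์ฃผ๋ฉด batch size๋กœ ๋‚˜๋ˆ„์–ด ๋–จ์–ด์ง€์ง€ ์•Š๋Š” ๋งˆ์ง€๋ง‰ ์ž๋ฅ˜ batch๋Š” ๋ฒ„๋ฆฌ๊ฒŒ ๋œ๋‹ค.
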
diff --git a/_posts/2022-04-30-dlZeroToAll-PyTorch-5.markdown b/_posts/2022-04-30-dlZeroToAll-PyTorch-5.markdown new file mode 100644 index 00000000000..eda3dcfe325 --- /dev/null +++ b/_posts/2022-04-30-dlZeroToAll-PyTorch-5.markdown @@ -0,0 +1,451 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab5: Logistic Classification" +author: Kwon +date: 2022-04-30T00:00:00 +0900 +categories: [pytorch, study] +tags: [logistic-classification, binary-cross-entropy] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab 5: Logistic Classification ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** +## Logistic Regression(Binary Classification) +Hypothesis๋กœ sigmoid(logistic) ํ•จ์ˆ˜๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ํšŒ๊ท€ ๋ฐฉ๋ฒ•์ด๋‹ค. ํ”ํžˆ Binary classification ploblem์— ๋งŽ์ด ์‚ฌ์šฉํ•˜๋Š”๋ฐ, ์ด ๊ฒฝ์šฐ ์™œ ์„ ํ˜• ํšŒ๊ท€ ๋Œ€์‹  ๋กœ์ง€์Šคํ‹ฑ ํšŒ๊ท€๋ฅผ ์‚ฌ์šฉํ•˜๋Š”์ง€์— ๋Œ€ํ•ด ๋จผ์ € ์•Œ์•„๋ณด์ž + +### Hypothesis +๊ณต๋ถ€ ์‹œ๊ฐ„์— ๋”ฐ๋ฅธ ์‹œํ—˜ ํ•ฉ๊ฒฉ ์—ฌ๋ถ€ ๋ฐ์ดํ„ฐ ์…‹์„ ์ƒ๊ฐํ•ด ๋ณด์ž($y=1$: ํ•ฉ๊ฒฉ, $y=0$: ๋ถˆํ•ฉ๊ฒฉ). + +๋จผ์ € ์ด ๋ฐ์ดํ„ฐ ์…‹์˜ ์˜ˆ์ธก ๋ฌธ์ œ๋ฅผ ์„ ํ˜• ํšŒ๊ท€๋ฅผ ํ†ตํ•ด ์ ‘๊ทผํ•ด ๋ณผ ๊ฒƒ์ด๋‹ค. ๊ธฐ์กด์˜ ์„ ํ˜• ํšŒ๊ท€์‹์˜ hypothesis๋Š” $H(x_1, x_2, x_3) = xw + b$๋กœ ์ฃผ์–ด์กŒ์—ˆ๋‹ค. +ํ•™์Šต ์ดˆ๊ธฐ์—๋Š” 2.5์‹œ๊ฐ„(์ดˆ๋ก์ƒ‰ ์ ์„ )์„ ๊ธฐ์ค€์œผ๋กœ ํ•ฉ๊ฒฉ์ด ๊ฒฐ์ •๋˜์—ˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•ด ๋ณด์ž + +![](/posting_imgs/lab5-1.png) + +์œ„์™€ ๊ฐ™์ด ํšŒ๊ท€ ํ•จ์ˆ˜๋ฅผ ํ•™์Šตํ•˜๋ฉด x(๊ณต๋ถ€ ์‹œ๊ฐ„)๊ฐ€ ์ฃผ์–ด์กŒ์„ ๋•Œ y๊ฐ€ 0.5๋ฅผ ๋„˜์—ˆ๋Š”๊ฐ€ ์—ฌ๋ถ€์— ๋”ฐ๋ผ ๋ถ„๋ฅ˜๊ฐ€ ๊ฐ€๋Šฅํ•  ๊ฒƒ์ด๋‹ค(0.5๋ฅผ ๋„˜์œผ๋ฉด ํ•ฉ๊ฒฉ์œผ๋กœ ์ฒ˜๋ฆฌํ•˜๋ฉด ๋œ๋‹ค). + +ํ•˜์ง€๋งŒ ์ด ํšŒ๊ท€์‹์˜ ๋ฒ”์œ„๋ฅผ ๋„˜์–ด์„œ๋Š” ๊ฐ’ ์ฆ‰, ํ•™์Šต๋œ hypothesis์— ๋„ฃ์—ˆ์„ ๋•Œ prediction์ด $[0, 1]$๋ฅผ ๋ฒ—์–ด๋‚˜๋Š” ๊ฐ’์ด ์žˆ๋‹ค๋ฉด ๊ทธ์— ๋งž์ถฐ ํšŒ๊ท€์„ ์„ ๋‹ค์‹œ ํ•™์Šตํ•ด์•ผ ํ•œ๋‹ค. + +![](/posting_imgs/lab5-2.png) + +์œ„์ฒ˜๋Ÿผ 9์‹œ๊ฐ„(๋นจ๊ฐ„ O) ๊ณต๋ถ€ํ•œ ์‚ฌ๋žŒ์— ๋งž์ถฐ ํ•™์Šต์„ ๋‹ค์‹œ ํ•˜๋ฉด 2.5์‹œ๊ฐ„์—์„œ 5์‹œ๊ฐ„ ์‚ฌ์ด์˜ ๋ฒ”์œ„์— ์žˆ๋Š” ์‚ฌ๋žŒ๋“ค์€ ๋ชจ๋ธ์ด ๋ถˆํ•ฉ๊ฒฉํ•œ ๊ฒƒ์œผ๋กœ ํŒ๋‹จํ•˜๊ฒŒ ๋œ๋‹ค. + +์ด๋Ÿฐ ์ƒํ™ฉ๊ณผ ๊ฐ™์ด ์ ์ ˆํ•œ ๋ถ„๋ฅ˜๋ฅผ ํ•˜๊ธฐ ํž˜๋“  ๊ฒฝ์šฐ๊ฐ€ ์‹ค์ œ ๋ฐ์ดํ„ฐ์—์„œ๋Š” ๋ฌด์ˆ˜ํžˆ ๋งŽ์ด ์กด์žฌํ•  ์ˆ˜ ์žˆ๊ณ , ํ•™์Šต์ด ๋๋‚œ ์ดํ›„ ์˜ˆ์ธก์„ ํ•  ๋•Œ prediction์˜ ๋ฒ”์œ„๊ฐ€ $[0, 1]$์„ ๋ฒ—์–ด๋‚  ์ˆ˜ ์žˆ๋‹ค๋Š” ๋ฌธ์ œ๋„ ์žˆ๋‹ค. +์ด๋Ÿฌํ•œ ์ด์œ ๋“ค๋กœ ์„ ํ˜• ํšŒ๊ท€๋งŒ์œผ๋กœ ์ ‘๊ทผํ•˜๋Š” ๊ฒƒ์€ ๋ถ„๋ฅ˜ ๋ฌธ์ œ์—๋Š” ์ ํ•ฉํ•˜์ง€ ์•Š๋‹ค๊ณ  ํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ด๋ฒˆ์—๋Š” ๋กœ์ง€์Šคํ‹ฑ ํšŒ๊ท€๋กœ ์ด ๋ฌธ์ œ๋ฅผ ์ ‘๊ทผํ•ด ๋ณด์ž. ๋จผ์ € ์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜(๋กœ์ง€์Šคํ‹ฑ ํ•จ์ˆ˜)๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ •์˜๋œ๋‹ค. + +\\[ s(z) = \frac{1}{1+e^{-z}} \\] + +์ด๋ฅผ ๊ทธ๋ž˜ํ”„๋กœ ๊ทธ๋ ค๋ณด๋ฉด ์•„๋ž˜์™€ ๊ฐ™์€ ํ˜•ํƒœ๋ฅผ ๋ˆ๋‹ค. + +![](/posting_imgs/lab5-3.png){: height="60%"} + +๊ทธ๋ฆผ์œผ๋กœ๋„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋Š” ๊ฒƒ์ฒ˜๋Ÿผ ์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜๋Š” ํ•ญ์ƒ 0๊ณผ 1 ์‚ฌ์ด์— ๊ฐ’์ด ์กด์žฌํ•˜์—ฌ ์•ž์„œ ์–ธ๊ธ‰ํ•œ ์„ ํ˜• ํšŒ๊ท€์˜ ๋ฌธ์ œ์ ์„ ํ•ด๊ฒฐํ•  ์ˆ˜ ์žˆ๋‹ค. +์„ ํ˜• ํšŒ๊ท€์‹์„ ์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์˜ ์ธํ’‹์œผ๋กœ ๋„ฃ์œผ๋ฉด ์„ ํ˜• ํšŒ๊ท€์‹์—์„œ ์–ด๋–ค ๊ฐ’์ด ๋‚˜์˜ค๋“  0๊ณผ 1์‚ฌ์ด์˜ ๋ฒ”์œ„๋ฅผ ๋„˜์–ด ์ƒ๊ธฐ๋Š” ๋ฌธ์ œ๊ฐ€ ์ผ์–ด๋‚˜์ง€ ์•Š์„ ๊ฒƒ์ด๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ๋กœ์ง€์Šคํ‹ฑ ํšŒ๊ท€์˜ hypothesis๋Š” ๋กœ์ง€์Šคํ‹ฑ ํ•จ์ˆ˜์— ์„ ํ˜• ํšŒ๊ท€์‹์„ ๋„ฃ๋Š” ๊ฒƒ์œผ๋กœ ์ •์˜๋œ๋‹ค. 
+ +\\[ H(X) = \frac{1}{1+e^{-W^T X}} \\] + +์ด hypothesis๋ฅผ ๋”ฐ๋ฅธ๋‹ค๋ฉด ์•„๋ฌด๋ฆฌ ํฌ๊ฑฐ๋‚˜ ์ž‘์€ ๊ฐ’์ด ์ฃผ์–ด์ง€๋”๋ผ๋„ 0๊ณผ 1์‚ฌ์ด๋ฅผ ๋ฒ—์–ด๋‚˜์ง€ ์•Š๊ธฐ ๋•Œ๋ฌธ์— ๊ธฐ์กด์— ํ•ฉ๊ฒฉ์ด๋ผ๊ณ  ๋ถ„๋ฅ˜ ๋œ ๊ฐ’์ด ๊ฐ‘์ž๊ธฐ ๋ถˆํ•ฉ๊ฒฉ์ด ๋  ๊ฐ€๋Šฅ์„ฑ์ด ๋งค์šฐ ์ค„์–ด๋“ ๋‹ค. + +์ด์ฒ˜๋Ÿผ binary classification์— ์ ํ•ฉํ•œ ๊ฐ’์œผ๋กœ ๋งŒ๋“ค์–ด ์ฃผ๋Š” ํŠน์„ฑ ๋•Œ๋ฌธ์— ๋กœ์ง€์Šคํ‹ฑ ํšŒ๊ท€ ๋ฟ๋งŒ ์•„๋‹ˆ๋ผ, neural network๋ฅผ ์ด์šฉํ•œ binary classification์—์„œ output ์ง์ „์˜ ๋งˆ์ง€๋ง‰ ๋ ˆ์ด์–ด๋กœ๋„ ์‚ฌ์šฉ๋œ๋‹ค. ์ด ๋•Œ๋ฌธ์— activation function(ํ™œ์„ฑํ™” ํ•จ์ˆ˜)์ค‘ ํ•˜๋‚˜์ด๊ธฐ๋„ ํ•˜๋‹ค. +
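์‹ค์ œ๋กœ ์•„๋ฌด๋ฆฌ ํฌ๊ฑฐ๋‚˜ ์ž‘์€ ๊ฐ’์„ ๋„ฃ์–ด๋„ ์ถœ๋ ฅ์ด 0๊ณผ 1 ์‚ฌ์ด๋ฅผ ๋ฒ—์–ด๋‚˜์ง€ ์•Š๋Š”๋‹ค๋Š” ๊ฒƒ์€ ์•„๋ž˜์ฒ˜๋Ÿผ ๊ฐ„๋‹จํžˆ ํ™•์ธํ•ด ๋ณผ ์ˆ˜ ์žˆ๋‹ค.

```python
import torch

z = torch.tensor([-100., -1., 0., 1., 100.])
print(torch.sigmoid(z))  # ์–ด๋–ค ์ž…๋ ฅ์ด๋“  ํ•ญ์ƒ (0, 1) ์‚ฌ์ด์˜ ๊ฐ’์ด ๋‚˜์˜จ๋‹ค

'''output
tensor([0.0000, 0.2689, 0.5000, 0.7311, 1.0000])
'''
```
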

+ +### Cost(Binary Cross Entropy) +Hpothesis๋„ ์ •์˜ํ–ˆ์œผ๋‹ˆ cost๋ฅผ ์ •์˜ํ•  ์ฐจ๋ก€์ด๋‹ค. ์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์˜ ๊ฒฝ์šฐ ์„ ํ˜• ํšŒ๊ท€์—์„œ ์ผ๋˜ MSE๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด local minimun์ด ์žˆ์„ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ์ œ๋Œ€๋กœ ๋œ ํ•™์Šต์ด ๋˜์ง€ ์•Š์„ ์ˆ˜ ์žˆ๋‹ค. + +๋ง๋กœ ํ•˜๊ธฐ๋ณด๋‹ค ๊ทธ๋ž˜ํ”„๋กœ ํ•œ๋ฒˆ ํ™•์ธํ•ด ๋ณด์ž. +```python +import matplotlib.pyplot as plt +import torch +import numpy as np +import os +os.environ['KMP_DUPLICATE_LIB_OK']='True' + +x_data = [[1, 2], [2, 3], [3, 1], [4, 3], [5, 3], [6, 2]] +y_data = [[0], [0], [0], [1], [1], [1]] +W_l = np.linspace(-3, 3, 1000) +cost_l = [] +for W in W_l: + hypothesis = 1 / (1 + torch.exp(-(W * x_train))) + cost = torch.mean((hypothesis - y_train) ** 2) + + cost_l.append(cost.item()) + +plt.plot(W_l, cost_l) +plt.xlabel('$W$') +plt.ylabel('Cost') +plt.show() +``` + +![](/posting_imgs/lab5-4.png) + +์šฐ๋ฆฌ๊ฐ€ ์ƒ๊ฐํ•˜๊ธฐ์— `W`๊ฐ€ 0 ๊ทผ์ฒ˜์˜ ๊ฐ’์œผ๋กœ ์ˆ˜๋ ดํ•˜๋Š” ๊ฒƒ์ด ๋งž์•„ ๋ณด์ธ๋‹ค. ํ•˜์ง€๋งŒ gradient descent๋ฅผ ์‹œ์ž‘ํ•˜๋Š” ์ ์ด -1 ๋ถ€๊ทผ, ํ˜น์€ ๋” ์ž‘์€ ๊ฐ’์ด๋ผ๋ฉด ์šฐ๋ฆฌ๊ฐ€ ์›ํ•˜๋Š” ๋ฐฉํ–ฅ๊ณผ ๋ฐ˜๋Œ€๋กœ ๊ทน์†Œ์ ์„ ์ฐพ์•„๊ฐˆ ๊ฒƒ์ด๋‹ค. ๊ทธ ๋ฐฉํ–ฅ์—๋„ ๊ทน์†Œ์ ์ด ์žˆ์„ ์ˆ˜ ์žˆ๊ฒ ์ง€๋งŒ ๊ทธ๊ฑด ์šฐ๋ฆฌ๊ฐ€ ์›ํ•˜๋Š” global minimum์ด ์•„๋‹ˆ๋‹ค. + +์œ„ ๊ทธ๋ž˜ํ”„๋Š” ์กฐ๊ธˆ ์• ๋งคํ•˜๋‹ค๊ณ  ๋А๋‚„ ์ˆ˜ ์žˆ๊ฒ ์ง€๋งŒ ์‹ค์ œ๋กœ cost์— global minimum์ด ์•„๋‹Œ ์—ฌ๋Ÿฌ ๊ทน์†Œ์ ์ด ์žˆ๋‹ค๋Š” ๊ฒƒ์€ ํ™•์‹คํžˆ ํ•™์Šต์ด ์ž˜ ๋˜์ง€ ์•Š์„ ๊ฒƒ์ด๋ผ๋Š” ๊ฑด ์•Œ ์ˆ˜ ์žˆ๋‹ค. + +๊ทธ๋ž˜์„œ cost๋กœ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ํ•จ์ˆ˜๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +\\[ cost(W) = -\frac{1}{m} \sum y \log\left(H(x)\right) + (1-y) \left( \log(1-H(x) \right) \\] + +log๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ง€์ˆ˜ํ•จ์ˆ˜๋ฅผ ๋‹ค๋ฃจ๊ธฐ ์ข‹์€ ํ˜•ํƒœ๋กœ ๋งŒ๋“ค์–ด ์ฃผ๊ณ , ์ •๋‹ต์— ๋”ฐ๋ผ ์ ์ ˆํ•œ cost๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋„๋ก ๋งŒ๋“ค์—ˆ๋Š”๋ฐ ๊ทธ cost์˜ ํ˜•ํƒœ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +![](/posting_imgs/lab5-5.png){: height="50%" aligncenter} + +๋‹ต์ด 1์ธ ๊ฒฝ์šฐ(์ดˆ๋ก์ƒ‰) 0์— ๊ฐ€๊นŒ์šธ ๋•Œ cost๊ฐ€ ์˜ฌ๋ผ๊ฐ€๊ณ  1์ผ ๋•Œ 0์ด๋„๋ก, ๋‹ต์ด 0์ธ ๊ฒฝ์šฐ ๊ทธ ๋ฐ˜๋Œ€๋กœ ์ž‘๋™ํ•˜๊ฒŒ cost๋ฅผ ์ •์˜ํ•˜์˜€๋‹ค. +๋•๋ถ„์— ์‹œ๊ทธ๋ชจ์ด๋“œ๋ฅผ ์ด์šฉํ•ด์„œ ํ•™์Šตํ•˜๊ธฐ์— ์ ์ ˆํ•œ cost์˜ ํ˜•ํƒœ๊ฐ€ ๋˜์—ˆ๋‹ค. + +์ด๋ฅผ ํ†ตํ•œ gradient descent๋Š” ์ „๊ณผ ๋™์ผํ•œ ํ˜•ํƒœ๋กœ ์ง„ํ–‰ํ•œ๋‹ค. + +\\[ W := W - \alpha \frac{\partial}{\partial W} cost(W) \\] + +*** +## Import +```python +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim + +# seed ๊ณ ์ • +torch.manual_seed(1) +``` + +*** +## Training data +```python +x_data = [[1, 2], [2, 3], [3, 1], [4, 3], [5, 3], [6, 2]] +y_data = [[0], [0], [0], [1], [1], [1]] + +x_train = torch.FloatTensor(x_data) +y_train = torch.FloatTensor(y_data) + +print(x_train.shape) +print(y_train.shape) + +'''output +torch.Size([6, 2]) +torch.Size([6, 1]) +''' +``` + +*** +## Computing Hypothesis and Cost Function +์ผ๋‹จ์€ ์ˆ˜์‹ ๊ทธ๋Œ€๋กœ ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•ด์„œ ์‚ฌ์šฉํ•ด๋ณด์ž. + +### Hypothesis +\\[ H(X) = \frac{1}{1+e^{-W^T X}} \\] + +```python +W = torch.zeros((2, 1), requires_grad=True) +b = torch.zeros(1, requires_grad=True) + +hothesis = 1 / (1 + torch.exp(-(x_train.matmul(W) + b))) + +print(hypothesis) +print(hypothesis.shape) + +'''output +tensor([[0.5000], + [0.5000], + [0.5000], + [0.5000], + [0.5000], + [0.5000]], grad_fn=) +torch.Size([6, 1]) +''' +``` +`torch.exp(x)`๋กœ $e^x$์—ฐ์‚ฐ์„ ํ•  ์ˆ˜ ์žˆ๋‹ค. +W์™€ b๊ฐ€ ๋ชจ๋‘ 0์ด๊ธฐ ๋•Œ๋ฌธ์— 0.5๊ฐ€ output์œผ๋กœ ๋‚˜์˜จ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. 
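์ฐธ๊ณ ๋กœ ์ด๋ ‡๊ฒŒ ์ง์ ‘ ๊ตฌํ˜„ํ•œ ์‹œ๊ทธ๋ชจ์ด๋“œ๊ฐ€ ๋’ค์—์„œ ์‚ฌ์šฉํ•  `torch.sigmoid()`์™€ ๊ฐ™์€ ๊ฐ’์„ ๋‚ด๋Š”์ง€๋Š” ์•„๋ž˜์ฒ˜๋Ÿผ ๋น„๊ตํ•ด ๋ณผ ์ˆ˜ ์žˆ๋‹ค. ์œ„์—์„œ ์ •์˜ํ•œ `W`, `b`, `x_train`์ด ๊ทธ๋Œ€๋กœ ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์ฝ”๋“œ์ด๋‹ค.

```python
# ์œ„์—์„œ ์ •์˜ํ•œ W, b, x_train์ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์ฝ”๋“œ
manual = 1 / (1 + torch.exp(-(x_train.matmul(W) + b)))
builtin = torch.sigmoid(x_train.matmul(W) + b)
print(torch.allclose(manual, builtin))  # True
```
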
+ +### Cost +\\[ cost(W) = -\frac{1}{m} \sum y \log\left(H(x)\right) + (1-y) \left( \log(1-H(x) \right) \\] + +```python +losses = -(y_train * torch.log(hypothesis) + (1 - y_train) * torch.log(1 - hypothesis)) +print(losses) + +'''output +tensor([[0.6931], + [0.6931], + [0.6931], + [0.6931], + [0.6931], + [0.6931]], grad_fn=) +''' +``` + +$log$ ์—ฐ์‚ฐ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ `torch.log()`๋ฅผ ํ†ตํ•ด ํ•  ์ˆ˜ ์žˆ๋‹ค. +loss๋„ ์ž˜ ์ถœ๋ ฅ๋œ๋‹ค. + +### Full code +```python +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +W = torch.zeros((2, 1), requires_grad=True) +b = torch.zeros(1, requires_grad=True) +# optimizer ์„ค์ • +optimizer = optim.SGD([W, b], lr=1) + +nb_epochs = 1000 +for epoch in range(nb_epochs + 1): + + # Cost ๊ณ„์‚ฐ + hypothesis = torch.sigmoid(x_train.matmul(W) + b) # or .mm or @ + cost = -(y_train * torch.log(hypothesis) + + (1 - y_train) * torch.log(1 - hypothesis)).mean() + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # 100๋ฒˆ๋งˆ๋‹ค ๋กœ๊ทธ ์ถœ๋ ฅ + if epoch % 100 == 0: + print('Epoch {:4d}/{} Cost: {:.6f}'.format( + epoch, nb_epochs, cost.item() + )) + +'''output +Epoch 0/1000 Cost: 0.693147 +Epoch 100/1000 Cost: 0.134722 +Epoch 200/1000 Cost: 0.080643 +Epoch 300/1000 Cost: 0.057900 +Epoch 400/1000 Cost: 0.045300 +Epoch 500/1000 Cost: 0.037261 +Epoch 600/1000 Cost: 0.031673 +Epoch 700/1000 Cost: 0.027556 +Epoch 800/1000 Cost: 0.024394 +Epoch 900/1000 Cost: 0.021888 +Epoch 1000/1000 Cost: 0.019852 +''' +``` + +hypothesis์™€ cost๊ฐ€ ๋‹ฌ๋ฆฌ์ง„ ๊ฒƒ ์™ธ์—๋Š” ์ด์ „์˜ lab๋“ค์—์„œ ํ•™์Šตํ•˜๋Š” ์ฝ”๋“œ์™€ ๋‹ฌ๋ผ์ง„ ๊ฒƒ์ด ์—†๋‹ค. + +## Computing Hypothesis and Cost Function with `torch` +์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์™€ binary cross entropy๋Š” PyTorch์—์„œ ๊ธฐ๋ณธ์ ์œผ๋กœ ์ œ๊ณตํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๊ตณ์ด ์œ„์ฒ˜๋Ÿผ ๊ตฌํ˜„ํ•˜์ง€ ์•Š๊ณ  ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค + +```python +hypothesis = torch.sigmoid(x_train.matmul(W) + b) + +print(hypothesis) +print(hypothesis.shape) +ptint(F.binary_cross_entropy(hypothesis, y_train)) + +'''output +tensor([[0.5000], + [0.5000], + [0.5000], + [0.5000], + [0.5000], + [0.5000]], grad_fn=) +torch.Size([6, 1]) +tensor(0.6931, grad_fn=) +''' +``` + +์•ž์„œ ์ˆ˜์‹์œผ๋กœ ๊ตฌํ˜„ํ–ˆ๋˜ ๊ฒƒ๊ณผ ๋™์ผํ•œ ๊ฒฐ๊ณผ๊ฐ€ ๋‚˜์˜จ๋‹ค. + +### Full Code +```python +# ๋ชจ๋ธ ์ดˆ๊ธฐํ™” +W = torch.zeros((2, 1), requires_grad=True) +b = torch.zeros(1, requires_grad=True) +# optimizer ์„ค์ • +optimizer = optim.SGD([W, b], lr=1) + +nb_epochs = 1000 +for epoch in range(nb_epochs + 1): + + # Cost ๊ณ„์‚ฐ + hypothesis = torch.sigmoid(x_train.matmul(W) + b) # or .mm or @ + cost = F.binary_cross_entropy(hypothesis, y_train) + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # 100๋ฒˆ๋งˆ๋‹ค ๋กœ๊ทธ ์ถœ๋ ฅ + if epoch % 100 == 0: + print('Epoch {:4d}/{} Cost: {:.6f}'.format( + epoch, nb_epochs, cost.item() + )) +'''output +Epoch 0/1000 Cost: 0.693147 +Epoch 100/1000 Cost: 0.134722 +Epoch 200/1000 Cost: 0.080643 +Epoch 300/1000 Cost: 0.057900 +Epoch 400/1000 Cost: 0.045300 +Epoch 500/1000 Cost: 0.037261 +Epoch 600/1000 Cost: 0.031672 +Epoch 700/1000 Cost: 0.027556 +Epoch 800/1000 Cost: 0.024394 +Epoch 900/1000 Cost: 0.021888 +Epoch 1000/1000 Cost: 0.019852 +''' +``` + +๋งˆ์ž”๊ฐ€์ง€๋กœ hypothesis์™€ cost๋งŒ ๋‹ฌ๋ผ์ง€๊ณ  ๊ธฐ๋ณธ์ ์ธ ํ‹€์€ ๊ฐ™๋‹ค. ํ•™์Šต๋„ ์ž˜ ๋˜๋Š” ๋ชจ์Šต์ด๋‹ค. + +*** +## Evaluation +์šฐ๋ฆฌ๊ฐ€ ๋งŒ๋“  ๋ชจ๋ธ์ด ์–ผ๋งˆ๋‚˜ ์ •ํ™•ํ•œ์ง€ ํ™•์ธํ•ด๋ณด์ž + +์‹ค์ œ ์šฐ๋ฆฌ๊ฐ€ ๋งŒ๋“  ๋ชจ๋ธ์€ ์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์˜ ๊ฒฐ๊ณผ๊ฐ’์ด๊ธฐ ๋•Œ๋ฌธ์— ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์†Œ์ˆ˜์˜ ํ˜•ํƒœ๋กœ ๋‚˜ํƒ€๋‚œ๋‹ค. 
+ +```python +hypothesis = torch.sigmoid(x_train.matmul(W) + b) +print(hypothesis[:5]) + +'''output +tensor([[0.4103], + [0.9242], + [0.2300], + [0.9411], + [0.1772]], grad_fn=) +''' +``` + +์‹ค์ œ๋กœ๋Š” 0๋˜๋Š” 1์˜ ๊ฐ’์„ ๊ฐ€์ง€๊ธฐ ๋•Œ๋ฌธ์— ์–ด๋–ค ๊ธฐ์ค€์„ ํ†ตํ•ด ๊ฒฐ๊ณผ๊ฐ’์„ ๋งž์ถฐ ์ค„ ํ•„์š”๊ฐ€ ์žˆ์–ด ๋ณด์ธ๋‹ค. ์—ฌ๊ธฐ์„œ๋Š” 0.5๋ฅผ ๊ธฐ์ค€์œผ๋กœ 0๊ณผ 1์„ ๋‚˜๋ˆˆ๋‹ค๊ณ  ์ƒ๊ฐํ•˜๊ณ  ์ง„ํ–‰ํ–ˆ๋‹ค. + +```python +prediction = hypothesis >= torch.FloatTensor([0.5]) +print(prediction[:5]) + +'''output +tensor([[0], + [1], + [0], + [1], + [0]], dtype=torch.uint8) +''' +``` + +์ด๋ ‡๊ฒŒ ํ•˜๋ฉด 0.5 ์ด์ƒ์˜ ๊ฐ’์€ 1๋กœ, ๋ฏธ๋งŒ์˜ ๊ฐ’๋Š” 0์œผ๋กœ ๋งตํ•‘ํ•˜์—ฌ `ByteTensor`์˜ ํ˜•ํƒœ๋กœ ๋Œ๋ ค์ค€๋‹ค. + +์‹ค์ œ ๊ฐ’๊ณผ ๋น„๊ตํ•ด ๋ณด๋ฉด ์ž˜ ์˜ˆ์ธกํ•œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +```python +print(prediction[:5]) +print(y_train[:5]) + +correct_prediction = prediction.float() == y_train +print(correct_prediction[:5]) + +'''output +tensor([[0], + [1], + [0], + [1], + [0]], dtype=torch.uint8) +tensor([[0.], + [1.], + [0.], + [1.], + [0.]]) + +tensor([[1], + [1], + [1], + [1], + [1]], dtype=torch.uint8) +''' +``` + +๋ชจ๋ธ์ด ์˜ˆ์ธกํ•œ ๊ฐ’์ด ์‹ค์ œ์™€ ๊ฐ™์•˜์„ ๋•Œ 1์„ ๋ฐ˜ํ™˜ํ•˜๋„๋ก ํ–ˆ์„ ๋•Œ ๋ชจ๋‘ 1์ด ๋‚˜์™”์œผ๋ฏ€๋กœ ์ฃผ์–ด์ง„ ๊ฐ„๋‹จํ•œ ๋ฐ์ดํ„ฐ ์…‹์— ํ•œํ•ด์„œ๋Š” ์ž˜ ํ•™์Šต์ด ๋˜์—ˆ๋‹ค๊ณ  ํ•  ์ˆ˜ ์žˆ๋‹ค. + +*** +## Higher Implementation with Class +Bynary Classification ๋ฌธ์ œ์—์„œ Sigmoid Reggression์„ ์‚ฌ์šฉํ•˜๋ ค๊ณ  ํ•  ๋•Œ [lab4_1](/posts/dlZeroToAll-PyTorch-4_1/)์—์„œ ๋‚˜์™”๊ฑด ๊ฒƒ์ฒ˜๋Ÿผ `Module`์„ ์‚ฌ์šฉํ•˜์—ฌ calss๋ฅผ ๋งŒ๋“ค์–ด ์ง„ํ–‰ํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ด๋ฒˆ์— ์‚ฌ์šฉํ•œ ๋ฐ์ดํ„ฐ ์…‹์€ ๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ ์ฝ”๋“œ์™€ ํ•จ๊ป˜ ์ œ๊ณต๋˜๋Š” ๋‹น๋‡จ๋ณ‘ ๋ฐ์ดํ„ฐ์…‹์ด๋ฉฐ 8๊ฐœ ์ฐจ์›์˜ ์ธํฟ๊ณผ ๋‹น๋‡จ๋ณ‘ ์—ฌ๋ถ€์— ๋”ฐ๋ฅธ 0, 1์˜ ๊ฐ’์œผ๋กœ ๋ฐ์ดํ„ฐ๊ฐ€ ๊ตฌ์„ฑ๋˜์–ด ์žˆ๋‹ค. + +### Loading Real Data +```python +import numpy as np + +xy = np.loadtxt('data-03-diabetes.csv', delimiter=',', dtype=np.float32) +x_data = xy[:, 0:-1] +y_data = xy[:, [-1]] +x_train = torch.FloatTensor(x_data) +y_train = torch.FloatTensor(y_data) + +print(x_train[0:5]) +print(y_train[0:5]) + +'''output +tensor([[-0.2941, 0.4874, 0.1803, -0.2929, 0.0000, 0.0015, -0.5312, -0.0333], + [-0.8824, -0.1457, 0.0820, -0.4141, 0.0000, -0.2072, -0.7669, -0.6667], + [-0.0588, 0.8392, 0.0492, 0.0000, 0.0000, -0.3055, -0.4927, -0.6333], + [-0.8824, -0.1055, 0.0820, -0.5354, -0.7778, -0.1624, -0.9240, 0.0000], + [ 0.0000, 0.3769, -0.3443, -0.2929, -0.6028, 0.2846, 0.8873, -0.6000]]) +tensor([[0.], + [1.], + [0.], + [1.], + [0.]]) +''' +``` + +### Train with Class +์ฐจ์›์— ๋งž์ถฐ class๋ฅผ ์ •์˜ํ•ด ์ค€๋‹ค. ์ด๋•Œ ๋ฐ”๋กœ ์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์— ๋„ฃ๋Š” ๊ฒƒ์ด ์•„๋‹ˆ๋ผ linear function์œผ๋กœ ๋งŒ๋“ค์–ด ์ค€ ํ›„์— ์ ์šฉํ•ด์•ผ ์šฐ๋ฆฌ๊ฐ€ ์›ํ•˜๋Š” hypothesis๊ฐ€ ๋งŒ๋“ค์–ด์ง„๋‹ค. 
+ +```python +class BinaryClassifier(nn.Module): + def __init__(self): + super().__init__() + self.linear = nn.Linear(8, 1) + self.sigmoid = nn.Sigmoid() + + def forward(self, x): + return self.sigmoid(self.linear(x)) +``` + +```python +model = BinaryClassifier() + +# optimizer ์„ค์ • +optimizer = optim.SGD(model.parameters(), lr=1) + +nb_epochs = 100 +for epoch in range(nb_epochs + 1): + + # H(x) ๊ณ„์‚ฐ + hypothesis = model(x_train) + + # cost ๊ณ„์‚ฐ + cost = F.binary_cross_entropy(hypothesis, y_train) + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # 20๋ฒˆ๋งˆ๋‹ค ๋กœ๊ทธ ์ถœ๋ ฅ + if epoch % 10 == 0: + prediction = hypothesis >= torch.FloatTensor([0.5]) + correct_prediction = prediction.float() == y_train + accuracy = correct_prediction.sum().item() / len(correct_prediction) + print('Epoch {:4d}/{} Cost: {:.6f} Accuracy {:2.2f}%'.format( + epoch, nb_epochs, cost.item(), accuracy * 100, + )) + +'''output +Epoch 0/100 Cost: 0.704829 Accuracy 45.72% +Epoch 10/100 Cost: 0.572391 Accuracy 67.59% +Epoch 20/100 Cost: 0.539563 Accuracy 73.25% +Epoch 30/100 Cost: 0.520042 Accuracy 75.89% +Epoch 40/100 Cost: 0.507561 Accuracy 76.15% +Epoch 50/100 Cost: 0.499125 Accuracy 76.42% +Epoch 60/100 Cost: 0.493177 Accuracy 77.21% +Epoch 70/100 Cost: 0.488846 Accuracy 76.81% +Epoch 80/100 Cost: 0.485612 Accuracy 76.28% +Epoch 90/100 Cost: 0.483146 Accuracy 76.55% +Epoch 100/100 Cost: 0.481234 Accuracy 76.81% +''' +``` + +ํ•™์Šต์ด ์ •์ƒ์ ์œผ๋กœ ๋œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์ด๋•Œ accuracy๋Š” `correct_prediction`์— ๋งž์•˜๋Š”์ง€ ํ‹€๋ ธ๋Š”์ง€๋ฅผ ์ €์žฅํ•ด ํ‰๊ท ์„ ๋‚ด์–ด ๊ณ„์‚ฐํ•œ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-05-01-dlZeroToAll-PyTorch-6.markdown b/_posts/2022-05-01-dlZeroToAll-PyTorch-6.markdown new file mode 100644 index 00000000000..4f790a0142e --- /dev/null +++ b/_posts/2022-05-01-dlZeroToAll-PyTorch-6.markdown @@ -0,0 +1,233 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab6: Softmax Classification" +author: Kwon +date: 2022-05-01T23:00:00 +0900 +categories: [pytorch, study] +tags: [softmax-classification, multinomial-classification, cross-entropy] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab 6: Softmax Classification ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** +## Softmax Classification(Multinomial Classification) +[์ด์ „ ํฌ์ŠคํŒ…](/posts/dlZeroToAll-PyTorch-5/)์—์„œ๋Š” ์ด์ง„ ๋ถ„๋ฅ˜ ๋ฌธ์ œ์— ๋Œ€ํ•ด ์•Œ์•„๋ดค๋‹ค. ์ด๋ฒˆ์—๋Š” ๋ถ„๋ฅ˜ํ•ด์•ผ ํ•  ๋ฒ”์ฃผ๊ฐ€ 2๊ฐœ(0, 1)๊ฐ€ ์•„๋‹ˆ๋ผ ์—ฌ๋Ÿฌ๊ฐœ์ธ ๋‹ค์ค‘ ๋ถ„๋ฅ˜์— ๋Œ€ํ•ด ์•Œ์•„๋ณด๋ ค ํ•œ๋‹ค. + +### Hypothesis +์ด์ง„ ๋ถ„๋ฅ˜๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๋‘ ์นดํ…Œ๊ณ ๋ฆฌ์˜ ๊ฐ’๋“ค์ด ์ž˜ ๋‚˜๋ˆ ์งˆ ์ˆ˜ ์žˆ๋Š” ์„ ์„ ๊ธ‹๋Š” ๊ฒƒ๊ณผ ๊ฐ™๋‹ค. + +![](/posting_imgs/lab6-1.png){: width="40%"} + +์ด์ฒ˜๋Ÿผ ๋‹ค์ค‘ ๋ถ„๋ฅ˜๋„ ์—ฌ๋Ÿฌ ์นดํ…Œ๊ณ ๋ฆฌ์˜ ๊ฐ’๋“ค์„ ์ž˜ ๋‚˜๋ˆ„๋Š” ์„ ๋“ค์„ ๊ธ‹๋Š”๋‹ค๊ณ  ์ƒ๊ฐํ•ด ๋ณด์ž. ๊ทธ๋Ÿฌ๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ทธ๋ฆผ์ด ๊ทธ๋ ค์งˆ ๊ฒƒ์ด๋‹ค. + +![](/posting_imgs/lab6-2.png){: width="40%"} + +ํŒŒ๋ž€์ƒ‰ ์„ ์€ **A or not**, ๋นจ๊ฐ„์ƒ‰ ์„ ์€ **B or not** ๊ทธ๋ฆฌ๊ณ  ์ดˆ๋ก์ƒ‰ ์„ ์€ **C or not**์œผ๋กœ ๊ตฌ๋ถ„ํ•˜๊ณ  ์žˆ๋‹ค๊ณ  ์ƒ๊ฐํ•˜๋ฉด ๊ฐ ์นดํ…Œ๊ณ ๋ฆฌ๋งˆ๋‹ค ํ•˜๋‚˜์˜ ๋ถ„๋ฅ˜ ๋ฌธ์ œ๋กœ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +์ด ๋ง์€ ๊ฐ๊ฐ์˜ ์นดํ…Œ๊ณ ๋ฆฌ๋ฅผ ๋ถ„๋ฅ˜ํ•˜๋Š” hypothesis๋ฅผ ์ •์˜ํ•  ์ˆ˜ ์žˆ๋‹ค๋Š” ๊ฒƒ์ธ๋ฐ, ์ด๋ฅผ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ํ–‰๋ ฌ์˜ ์—ฐ์‚ฐ์œผ๋กœ ๊น”๋”ํ•˜๊ฒŒ ๋‚˜ํƒ€๋‚ผ ์ˆ˜ ์žˆ๋‹ค. 
+ +![](/posting_imgs/lab6-3.png) + +์ด๋ ‡๊ฒŒ ๋‚˜์˜จ ํ™•๋ฅ ์„ ๊ฐ๊ฐ ์‹œ๊ทธ๋ชจ์ด๋“œ๋กœ ์ฒ˜๋ฆฌํ•˜์—ฌ ๊ฐ’์„ ๋‚ผ ์ˆ˜๋„ ์žˆ๊ฒ ์ง€๋งŒ, ์ด๋•Œ ์กฐ๊ธˆ ๋” ํŽธํ•˜๊ณ  ์ตœ์ ํ™”๋œ ํ•จ์ˆ˜๋กœ softmax๋ฅผ ์‚ฌ์šฉํ•˜๊ฒŒ ๋˜๋ฉฐ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ •์˜๋œ๋‹ค. + +\\[ P(class=i) = \frac{e^i}{\sum e^i} \\] + +max๋ฅผ softํ•˜๊ฒŒ ๋ฝ‘๋Š” ๊ฒƒ์œผ๋กœ, ์ด ๊ฐ’์ด ์ตœ๋Œ€์ธ๊ฐ€, ์•„๋‹Œ๊ฐ€๋กœ ๋‹จ์ •์ง€์–ด ์ถœ๋ ฅํ•˜๋Š” ๊ฒƒ์ด ์•„๋‹ˆ๋ผ ์ตœ๋Œ€์ผ ํ™•๋ฅ ์„ ์ถœ๋ ฅํ•˜๋Š” ๊ฑฐ๋ผ๊ณ  ์ดํ•ดํ•˜๋ฉด ๋ ๊ฒƒ ๊ฐ™๋‹ค. + +์ด๋ ‡๊ฒŒ ๋ฝ‘์€ ํ™•๋ฅ ๊ฐ’์œผ๋กœ ์›-ํ•ซ ์ธ์ฝ”๋”ฉํ•˜์—ฌ ์ตœ์ข…์ ์œผ๋กœ ์–ด๋А ์นดํ…Œ๊ณ ๋ฆฌ์— ์†ํ•˜๋Š”์ง€ ์ถœ๋ ฅํ•˜๊ฒŒ ๋œ๋‹ค. ์•„๋ž˜ ๊ทธ๋ฆผ์€ A ์นดํ…Œ๊ณ ๋ฆฌ์— ์†ํ•œ๋‹ค๊ณ  ์ƒ๊ฐํ•  ๋•Œ์˜ ์ถœ๋ ฅ์„ ๋ณด์ธ ๊ฒƒ์ด๋‹ค. + +![](/posting_imgs/lab6-4.png) + +์ฝ”๋“œ๋กœ๋Š” `torch`์—์„œ ์ œ๊ณตํ•˜๋Š” ํ•จ์ˆ˜๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด ๋œ๋‹ค. + +```python +import torch +import torch.nn.functional as F + +z = torch.FloatTensor([1, 2, 3]) +hypothesis = F.softmax(z, dim=0) +print(hypothesis) + +'''output +tensor([0.0900, 0.2447, 0.6652]) +''' +``` + +๊ฐ€์žฅ ํฐ ๊ฐ’์ธ 3์˜ ํ™•๋ฅ ์ด ๊ฐ€์žฅ ํฌ๊ฒŒ(softmax) ์ถœ๋ ฅ๋œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +
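softmax์˜ ์ถœ๋ ฅ์€ ํ™•๋ฅ ์ด๋ฏ€๋กœ ํ•ฉ์ด ํ•ญ์ƒ 1์ด๊ณ , ๊ฐ€์žฅ ํฐ ๊ฐ’์˜ ์ธ๋ฑ์Šค๊ฐ€ ๊ณง ์˜ˆ์ธกํ•œ class๊ฐ€ ๋œ๋‹ค. ์œ„์—์„œ ๊ณ„์‚ฐํ•œ `hypothesis`๋ฅผ ๊ทธ๋Œ€๋กœ ์ด์–ด์„œ ํ™•์ธํ•ด ๋ณธ ๊ฐ„๋‹จํ•œ ์˜ˆ์‹œ์ด๋‹ค.

```python
# ์œ„์—์„œ ๊ณ„์‚ฐํ•œ hypothesis = F.softmax(z, dim=0)๋ฅผ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉํ•œ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์ฝ”๋“œ
print(hypothesis.sum())     # tensor(1.) - ํ™•๋ฅ ์ด๋ฏ€๋กœ ํ•ฉ์€ ํ•ญ์ƒ 1
print(hypothesis.argmax())  # tensor(2) - ๊ฐ€์žฅ ํฐ ์ž…๋ ฅ์ด์—ˆ๋˜ 3์˜ ์ž๋ฆฌ๊ฐ€ ์„ ํƒ๋œ๋‹ค
```
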

+ +### Cost(Cross Entropy) + +hHypothesis๊นŒ์ง€ ๋ชจ๋‘ ์ •์˜ํ–ˆ์œผ๋‹ˆ, ์ด์ œ ๊ทธ์— ๋งž๋Š” cost๋ฅผ ์ •์˜ํ•ด์•ผ ํ•œ๋‹ค. ๋‹ค์ค‘ ๋ถ„๋ฅ˜์—์„œ๋Š” cross entropy๋ฅผ cost๋กœ ์‚ฌ์šฉํ•˜๋Š”๋ฐ, ์ด๋Š” ๊ฐ„๋‹จํžˆ ๋งํ•ด์„œ ์˜ˆ์ธกํ•œ ํ™•๋ฅ ์ด ์‹ค์ œ ๋ถ„ํฌ์˜ ํ™•๋ฅ ๊ณผ ์–ผ๋งˆ๋‚˜ ์ฐจ์ด๊ฐ€ ๋‚˜๋Š”์ง€ ๊ณ„์‚ฐํ•œ ๊ฐ’์ด๋‹ค. + +์ž์„ธํ•œ ๋‚ด์šฉ์€ ์ด๋ฏธ [cross entropy ํฌ์ŠคํŒ…](/posts/cross-entropy/)์—์„œ ๋‹ค๋ฃจ์—ˆ๊ธฐ ๋•Œ๋ฌธ์— ์—ฌ๊ธฐ๋กœ ๋Œ๋ฆฌ๊ณ  ๋ฐ”๋กœ ์ฝ”๋“œ๋กœ ๋„˜์•„๊ฐ€์ž. ์‹ค์ œ ๊ฐ’์„ ์›ํ•ซ ์ธ์ฝ”๋”ฉ ํ•œ ๊ฒƒ์—, softmax์— log๋ฅผ ์ทจํ•œ ๊ฐ’์„ ์›์†Œ๋ณ„๋กœ ๊ณฑํ•œ ๋’ค์—, 0์ด ์•„๋‹Œ ๊ฐ’๋“ค์€ ํ‰๊ท ๋‚ด์–ด ์ „์ฒด์ ์ธ cross entropy๋ฅผ ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +```python +z = torch.rand(3, 5, requires_grad=True) +hypothesis = F.softmax(z, dim=1) +print(hypothesis) + +y = torch.randint(5, (3,)).long() +print(y) + +y_one_hot = torch.zeros_like(hypothesis) +y_one_hot.scatter_(1, y.unsqueeze(1), 1) + +cost = (y_one_hot * -torch.log(hypothesis)).sum(dim=1).mean() +print(cost) + +'''output +tensor([[0.2645, 0.1639, 0.1855, 0.2585, 0.1277], + [0.2430, 0.1624, 0.2322, 0.1930, 0.1694], + [0.2226, 0.1986, 0.2326, 0.1594, 0.1868]], grad_fn=) + +tensor([0, 2, 1]) + +tensor([[1., 0., 0., 0., 0.], + [0., 0., 1., 0., 0.], + [0., 1., 0., 0., 0.]]) + +tensor(1.4689, grad_fn=) +''' + +์•„๋‹ˆ๋ฉด softmax์ฒ˜๋Ÿผ ๋ฏธ๋ฆฌ ๊ตฌํ˜„๋˜์–ด ์žˆ๋Š” ๊ฒƒ์„ ์‚ฌ์šฉํ•ด๋„ ๋œ๋‹ค. + +```python +# -log๋ฅผ ์ทจํ•ด์„œ ํ‰๊ท ๋‚ด๋Š” ๊ฒƒ๊นŒ์ง€๋งŒ torch ํ•จ์ˆ˜ ์‚ฌ์šฉ (Negative Log Likelihood) +F.nll_loss(F.log_softmax(z, dim=1), y) + +# NLL๊ณผ softmax๋ฅผ ๋ฌถ์–ด torch ํ•จ์ˆ˜๋กœ ์‚ฌ์šฉ +F.cross_entropy(z, y) +``` + +*** +## Training data +4์ฐจ์›์˜ input, 3๊ฐ€์ง€์˜ class๋ฅผ ๋‹ต์œผ๋กœ ๊ฐ€์ง„ ๋ฐ์ดํ„ฐ ์…‹์ด๋‹ค. + +```python +x_train = [[1, 2, 1, 1], + [2, 1, 3, 2], + [3, 1, 3, 4], + [4, 1, 5, 5], + [1, 7, 5, 5], + [1, 2, 5, 6], + [1, 6, 6, 6], + [1, 7, 7, 7]] +y_train = [2, 2, 2, 1, 1, 1, 0, 0] +x_train = torch.FloatTensor(x_train) +y_train = torch.LongTensor(y_train) +``` + +*** +## Train +Hypothesis์™€ cost์ด์™ธ์—๋Š” ํฌ๊ฒŒ ๋‹ฌ๋ผ์ง„ ์ ์€ ์—†๋‹ค. 3๊ฐ€์ง€ ๋ฐฉ์‹์˜ ๊ตฌํ˜„ ์ฝ”๋“œ๋งŒ ํ•œ๋ฒˆ ์‚ดํŽด๋ณด์ž + +
+ +### `F.softmax` + `torch.log` +```python +W = torch.zeros((4, 3), requires_grad=True) +b = torch.zeros(1, requires_grad=True) + +optimizer = optim.SGD([W, b], lr=0.1) + +nb_epochs = 1000 +for epoch in range(nb_epochs + 1): + + hypothesis = F.softmax(x_train.matmul(W) + b, dim=1) + y_one_hot = torch.zeros_like(hypothesis) + y_one_hot.scatter_(1, y_train.unsqueeze(1), 1) + cost = (y_one_hot * -torch.log(hypothesis)).sum(dim=1).mean() + + optimizer.zero_grad() + cost.backward() + optimizer.step() + + # 100๋ฒˆ๋งˆ๋‹ค ๋กœ๊ทธ ์ถœ๋ ฅ + if epoch % 100 == 0: + print('Epoch {:4d}/{} Cost: {:.6f}'.format( + epoch, nb_epochs, cost.item() + )) + +'''output +Epoch 0/1000 Cost: 1.098612 +Epoch 100/1000 Cost: 0.761050 +Epoch 200/1000 Cost: 0.689991 +Epoch 300/1000 Cost: 0.643229 +Epoch 400/1000 Cost: 0.604117 +Epoch 500/1000 Cost: 0.568255 +Epoch 600/1000 Cost: 0.533922 +Epoch 700/1000 Cost: 0.500291 +Epoch 800/1000 Cost: 0.466908 +Epoch 900/1000 Cost: 0.433507 +Epoch 1000/1000 Cost: 0.399962 +''' +``` + +

+ +### `F.cross_entropy` +softmax๊ฐ€ cross entropy์— ์†ํ•ด ์žˆ๊ธฐ ๋•Œ๋ฌธ์— hypothesis์—๋Š” softmax๊ฐ€ ์—†๋Š” ๋ชจ์Šต์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +```python +W = torch.zeros((4, 3), requires_grad=True) +b = torch.zeros(1, requires_grad=True) + +optimizer = optim.SGD([W, b], lr=0.1) + +nb_epochs = 1000 +for epoch in range(nb_epochs + 1): + + z = x_train.matmul(W) + b + cost = F.cross_entropy(z, y_train) + + optimizer.zero_grad() + cost.backward() + optimizer.step() + + if epoch % 100 == 0: + print('Epoch {:4d}/{} Cost: {:.6f}'.format( + epoch, nb_epochs, cost.item() + )) +``` + +
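`F.cross_entropy`๊ฐ€ ์ •๋ง ์•ž์—์„œ ์ง์ ‘ ๊ตฌํ˜„ํ•œ cross entropy์™€ ๊ฐ™์€ ๊ฐ’์„ ์ฃผ๋Š”์ง€ ๊ถ๊ธˆํ•˜๋‹ค๋ฉด ์•„๋ž˜์ฒ˜๋Ÿผ ๋น„๊ตํ•ด ๋ณผ ์ˆ˜ ์žˆ๋‹ค. ์œ„ ์ฝ”๋“œ์˜ `z`์™€ `y_train`์ด ๊ทธ๋Œ€๋กœ ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์Šค์ผ€์น˜์ด๋‹ค.

```python
# ์œ„ ์ฝ”๋“œ์˜ z = x_train.matmul(W) + b ์™€ y_train์ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ ํ™•์ธ์šฉ ์ฝ”๋“œ
y_one_hot = torch.zeros_like(z).scatter_(1, y_train.unsqueeze(1), 1)
low_level = (y_one_hot * -F.log_softmax(z, dim=1)).sum(dim=1).mean()
high_level = F.cross_entropy(z, y_train)
print(torch.allclose(low_level, high_level))  # True
```

`torch.log(F.softmax(...))` ๋Œ€์‹  ์ˆ˜์น˜์ ์œผ๋กœ ๋™์ผํ•œ `F.log_softmax`๋ฅผ ์“ด ๊ฒƒ๋งŒ ๋‹ค๋ฅผ ๋ฟ ๊ฐ™์€ ๊ณ„์‚ฐ์ด๋‹ค.
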

+ +### `nn.Module` +class๋ฅผ ์ •์˜ํ•˜์—ฌ ์‚ฌ์šฉํ•  ์ˆ˜๋„ ์žˆ๋‹ค. + +```python +class SoftmaxClassifierModel(nn.Module): + def __init__(self): + super().__init__() + self.linear = nn.Linear(4, 3) # Output์ด 3! + + def forward(self, x): + return self.linear(x) +``` + +์ด๋•Œ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ softmax๊ฐ€ cross entropy์— ์†ํ•ด ์žˆ๊ธฐ ๋•Œ๋ฌธ์— class๋ฅผ ์ •์˜ํ•  ๋•Œ๋Š” ์„ ํ˜• ํ•จ์ˆ˜ ๋ถ€๋ถ„๋งŒ ์ •์˜ํ•ด ์ค€ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +```python +model = SoftmaxClassifierModel() + +optimizer = optim.SGD(model.parameters(), lr=0.1) + +nb_epochs = 1000 +for epoch in range(nb_epochs + 1): + + prediction = model(x_train) + + cost = F.cross_entropy(prediction, y_train) + + optimizer.zero_grad() + cost.backward() + optimizer.step() + + if epoch % 100 == 0: + print('Epoch {:4d}/{} Cost: {:.6f}'.format( + epoch, nb_epochs, cost.item() + )) +``` \ No newline at end of file diff --git a/_posts/2022-05-04-autoencoders-3.markdown b/_posts/2022-05-04-autoencoders-3.markdown new file mode 100644 index 00000000000..65a41c4fd52 --- /dev/null +++ b/_posts/2022-05-04-autoencoders-3.markdown @@ -0,0 +1,103 @@ +--- +title: "์˜คํ† ์ธ์ฝ”๋”์˜ ๋ชจ๋“  ๊ฒƒ - 3. Autoencoders" +author: Kwon +date: 2022-05-04T00:00:00 +0900 +categories: [generative-model] +tags: [autoencoder, dae, cae, scae] +math: true +mermaid: false +--- + +์ดํ™œ์„๋‹˜์˜ [์˜คํ† ์ธ์ฝ”๋”์˜ ๋ชจ๋“  ๊ฒƒ](https://www.youtube.com/watch?v=o_peo6U7IRM) ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€ ๋ฐ ์ •๋ฆฌ๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž„์„ ์•Œ๋ ค๋“œ๋ฆฝ๋‹ˆ๋‹ค. + +*** +## Introduction +### Autoencoder +์˜คํ† ์ธ์ฝ”๋”๋Š” ์ธํฟ๊ณผ ์•„์›ƒํ’‹์ด ๊ฐ™์€ ๋„คํŠธ์›Œํฌ๋ฅผ ์˜๋ฏธํ•œ๋‹ค. Auto-associators, Diabolo nerworks, Sandglass-shaped net ๋“ฑ์˜ ์ด๋ช…์œผ๋กœ ๋ถˆ๋ฆฌ๊ธฐ๋„ ํ•˜๋ฉฐ ๊ฐ€์žฅ ๋งŽ์ด ๋ถˆ๋ฆฌ๋Š” ์ด๋ฆ„์€ ์—ญ์‹œ **Autoencoder**์ด๋‹ค. + +![Autoencoder์˜ ๋ชจ์Šต๊ณผ ๋น„์Šทํ•œ Diabolo์˜ ๋ชจ์Šต](/posting_imgs/diabolo.jpg) + +์˜คํ† ์ธ์ฝ”๋”๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ค‘๊ฐ„์˜ ์€๋‹‰์ธต์ด ์ž˜๋กํ•œ ๋ชจ์Šต์˜ ๋„คํŠธ์›Œํฌ์ธ๋ฐ, ์ด๋•Œ ์ค‘๊ฐ„ ์€๋‹‰์ธต์„ $Z$๋ผ๊ณ  ๋ถ€๋ฅด๋ฉฐ Code, Latent Variable, Feature, Hidden representation ๋“ฑ์œผ๋กœ ๋ถˆ๋ฆฐ๋‹ค. +๊ทธ๋ž˜์„œ $Z$๋ฅผ ์–ด๋–ป๊ฒŒ ์ƒ๊ฐํ•˜๋ƒ์— ๋”ฐ๋ผ ์˜คํ† ์ธ์ฝ”๋”์—์„œ ํ•™์Šตํ•˜๋Š” ๊ณผ์ •์„ Representation Learning, Efficient Code Learning ๋“ฑ์œผ๋กœ ๋ถ€๋ฅด๊ธฐ๋„ ํ•˜์ง€๋งŒ, ๊ฒฐ๊ตญ ์ด๋“ค์€ ๋ชจ๋‘ $Z$ ๋…ธ๋“œ๋ฅผ ๋ฐฐ์šฐ๋Š” ํ•™์Šต์„ ์ด๋ฅด๋Š” ๋ง๋“ค์ด๋‹ค. + +![Autoencoder์˜ ํ˜•ํƒœ](/posting_imgs/autoencoder.png) + +์ฒ˜์Œ ์˜คํ† ์ธ์ฝ”๋”๊ฐ€ ๊ฐ๊ด‘๋ฐ›์€ ์ด์œ ๋Š” unsupervised learning ๋ฌธ์ œ์ธ Demension Redection์„ supervised learning์œผ๋กœ ๋ฐ”๊ฟ” ํ’€ ์ˆ˜ ์žˆ๋‹ค๋Š” ๊ฐœ๋… ๋•Œ๋ฌธ์ด์—ˆ๋‹ค. +์ตœ์ข… ์ถœ๋ ฅ๋ฌผ์ด ์›๋ž˜์˜ ์ž…๋ ฅ๊ณผ ๊ฐ™์€ ๋ฌธ์ œ๋กœ ๋ณด๊ณ  ํ•™์Šต์„ ํ•œ ํ›„์—, encoder ๋ถ€๋ถ„๋งŒ ๋–ผ์„œ ์‚ฌ์šฉํ•˜๋ฉด ์ฐจ์›์ถ•์†Œ๊ฐ€ ๊ฐ€๋Šฅํ•œ ๋„คํŠธ์›Œํฌ๋ฅผ ์–ป์„ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +์ด ๊ฒฝ์šฐ ์ž๊ธฐ ์ž์‹ ์— ๋Œ€ํ•ด ํ•™์Šต(self learning)ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์ ์–ด๋„ training DB์— ์žˆ๋˜ ๊ฒƒ๋“ค์— ๋Œ€ํ•ด์„œ๋Š” ์••์ถ•์„ ์ž˜ ํ•ด์ค€๋‹ค. ์ฆ‰, ์ตœ์†Œ ์„ฑ๋Šฅ์ด ์–ด๋А์ •๋„ ๋ณด์žฅ๋œ๋‹ค๋Š” ์žฅ์ ์ด ์žˆ๋‹ค. +ํ•˜์ง€๋งŒ ๊ฐ™์€ ์ด์œ ๋กœ ์ƒˆ๋กœ์šด ๊ฒƒ์„ ์ƒ์„ฑํ•˜๋ ค ํ•ด๋„ ๊ธฐ์กด์˜ DB์™€ ๋น„์Šทํ•œ ๊ฒฐ๊ณผ๊ฐ€ ๋งŽ์ด ๋‚˜์˜จ๋‹ค๋Š” ํ‰๊ฐ€๋ฅผ ๋“ฃ๊ธฐ๋„ ํ•˜์˜€๋‹ค. + +#### General AE(AutoEncoder) and Linear AE +General AE๋ฅผ ์กฐ๊ธˆ ๋” ์ž์„ธํžˆ ๋ณด์ž. ์œ„์— ๋‚˜์™”๋˜ ๊ทธ๋ฆผ์„ ์ž์„ธํžˆ ๋‚˜ํƒ€๋‚ด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. 
+ +![General Autoencoder](/posting_imgs/autoencoder-loss.png) + +์ตœ์ข… ์ถœ๋ ฅ $Y$๋Š” ์ธ์ฝ”๋”์˜ ์ถœ๋ ฅ์€ ๋””์ฝ”๋”์— ์ธํ’‹์œผ๋กœ ๋„ฃ์–ด ๋‚˜์˜จ output์œผ๋กœ ์ •์˜ํ•  ์ˆ˜ ์žˆ๊ณ , ์ด๊ฒƒ์ด ์ฒซ input $X$์™€ ๊ฐ™๊ธฐ๋ฅผ ๋ฐ”๋ผ๋ฏ€๋กœ ์ด๋“ค๋ผ๋ฆฌ์˜ error๋ฅผ ๊ตฌํ•œ๋‹ค. +์ด๋•Œ๋Š” ๋ฐ์ดํ„ฐ์˜ ๋ถ„ํฌ ํ˜•ํƒœ์— ๋”ฐ๋ผ MSE ๋˜๋Š” [cross-entropy](/posts/cross-entropy/)๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. +์ด๋ ‡๊ฒŒ ๊ตฌํ•œ error๋ฅผ ์žฌ๊ตฌ์ถ•(reconstruct) ํ•œ ๊ฒƒ์— ๋Œ€ํ•œ error๋ผ๋Š” ์˜๋ฏธ๋กœ **Reconstruction Error**๋ผ๊ณ  ํ•œ๋‹ค. + +Linear AE๋Š” ๊ธฐ๋ณธ์ ์ธ ์˜คํ† ์ธ์ฝ”๋”์˜ ๊ตฌ์กฐ์—์„œ ์€๋‹‰์ธต์„ ํ™œ์„ฑํ™” ํ•จ์ˆ˜ ์—†์ด ์„ ํ˜• ํ•จ์ˆ˜๋ฅผ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉํ•œ ๊ฒƒ์„ ๋งํ•œ๋‹ค. +PCA(Principle Component Analysis)์™€ baisis๋Š” ๋‹ค๋ฅด์ง€๋งŒ ์„ ํ˜•์ ์œผ๋กœ ์ฐจ์› ์ถ•์†Œ๋ฅผ ์ง„ํ–‰ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๊ฒฐ๊ณผ์ ์œผ๋กœ PCA์™€ ๊ฐ™์€ mainifold๋ฅผ ํ•™์Šตํ•œ๋‹ค๋Š” ํŠน์ง•์ด ์žˆ๋‹ค. + +*** +## Pretraining +### Stacking AE +ํ˜„์žฌ์˜ CNN, DNN ๋ฐฉ๋ฒ•๋ก ๋“ค์— ๋น„ํ•ด ์˜ˆ์ „์˜ ๋ฐฉ๋ฒ•๋ก ๋“ค์€ ์ ์ ˆํ•œ parameter ์ดˆ๊ธฐํ™”๊ฐ€ ์–ด๋ ค์› ๋‹ค. ์ด๋•Œ ์˜คํ† ์ธ์ฝ”๋”๋กœ pretraining์„ ํ•˜๋‹ˆ ์„ฑ๋Šฅ ๊ฐœ์„ ์ด ์ด๋ฃจ์–ด์กŒ๋‹ค๊ณ  ํ•ด์„œ ์‚ฌ์šฉ๋˜๊ธฐ ์‹œ์ž‘ํ•œ ๋ฐฉ์‹์ด๋‹ค. +pretraining ๋ฐฉ์‹์€ ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +![Stacking Autoencoder](/posting_imgs/stacking-autoencoder.png) + +๋จผ์ € ์›ํ•˜๋Š” ์ฐจ์›์œผ๋กœ ๋ณด๋‚ธ ํ›„, ์›๋ž˜ ๋ ˆ์ด์–ด๋กœ ๋Œ์•„์™€์„œ ์ œ๋Œ€๋กœ ๋ณต๊ตฌ๊ฐ€ ๋˜๋Š”์ง€ ํ™•์ธํ•œ๋‹ค. ์ด ๊ณผ์ •์—์„œ ์˜คํ† ์ธ์ฝ”๋” ๊ตฌ์กฐ๊ฐ€ ์‚ฌ์šฉ๋œ๋‹ค. +์ ์–ด๋„ input layer์˜ ๊ฐ’๋“ค์„ ๋ณต๊ตฌํ•  ์ˆ˜ ์žˆ๋Š” prameter๋ฅผ ์‚ฌ์šฉํ•˜์ž๋Š” ๊ฒƒ์ด๋‹ค. ์ด ๊ณผ์ •์„ ๋ชจ๋“  ๋ ˆ์ด์–ด์— ๋Œ€ํ•ด ์ง„ํ–‰ํ•˜๋ฉด์„œ prameter๋ฅผ ๊ณ ์ •ํ•œ๋‹ค. + +ํ•™์Šต์ด ๋ชจ๋‘ ๋๋‚œ ๋’ค์—๋Š” random inintialization์œผ๋กœ backpropagationํ•œ๋‹ค. + +*** +## DAE(Denoising AE) +์›๋ž˜์˜ ์˜คํ† ์ธ์ฝ”๋” ๋ฐฉ์‹์œผ๋กœ ํ•™์Šต์„ ํ•˜๋˜ input ๋ฐ์ดํ„ฐ์— noise๋ฅผ ์ถ”๊ฐ€ํ•˜์—ฌ ํ•™์Šตํ•˜๋Š” ๋ฐฉ์‹์ด๋‹ค. ์ด๋•Œ ํ•ต์‹ฌ์€ noise๋ฅผ (์ด๋ฏธ์ง€ ๊ฐ™์€ ๊ฒƒ์„) ์‚ฌ๋žŒ์ด ๋ดค์„ ๋•Œ ๋‹ค๋ฅด์ง€ ์•Š์„ ์ •๋„๋งŒ ์ถ”๊ฐ€ํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +์ด๋Ÿฐ ์‹์œผ๋กœ ํ•™์Šต์„ ํ•˜๋ฉด ์‚ฌ๋žŒ์ด ๋ดค์„ ๋•Œ ๊ฐ™์€ ๊ฒƒ(manifold์ƒ ๊ฑฐ์˜ ๊ฐ™์€ ๊ฒƒ)์ด ๋ฌด์—‡์ธ์ง€ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๊ฒŒ ๋œ๋‹ค๋Š” concept์ด๋‹ค. + +![](/posting_imgs/dae.png" description="Denoising Autoencoder" %} + +noise๋ฅผ ์ถ”๊ฐ€ํ•˜๊ณ  noise๊ฐ€ ์—†๋Š” ์›๋ž˜ ๋ฐ์ดํ„ฐ์™€ error๋ฅผ ๊ตฌํ•˜๋ฉด์„œ manifold ์ƒ ๊ฐ™์€ ๊ฒƒ์ด ๋ญ”์ง€ ํ•™์Šตํ•˜๋Š” ๊ตฌ์กฐ์ด๋‹ค. + +์‹ค์ œ๋กœ ์ด๋ ‡๊ฒŒ noise๋ฅผ ์ถ”๊ฐ€ํ•˜์—ฌ ํ•™์Šตํ•œ ๊ฒฐ๊ณผ ์ด๋ฏธ์ง€์˜ ํŠน์ง•์„ ์ฐพ๋Š” edge detection์ด ์ž˜ ๋œ ๋ชจ์Šต์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์ž์„ธํ•œ ๊ฒƒ์€ ๋‹ค์Œ [๋งํฌ](http://videolectures.net/deeplearning2015_vincent_autoencoders/?q=vincent%20autoencoder)์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/dae-sample1.png" description="DAE์™€ ๋‹ค๋ฅธ AE๋“ค๊ณผ์˜ edge ๋น„๊ต" %} + +๋˜ํ•œ noise๋ฅผ ์ถ”๊ฐ€ํ• ์ˆ˜๋ก edge๊ฐ€ ๊ฐ•ํ•˜๊ฒŒ ๋‚˜ํƒ€๋‚˜๋Š” ๊ฒƒ๋„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/dae-sample2.png" description="noise ์ •๋„์— ๋”ฐ๋ฅธ edge ๋น„๊ต" %} + +๋‹ค๋งŒ noise ์ถ”๊ฐ€๊ฐ€ ๋„ˆ๋ฌด ์‹ฌํ•ด ์›๋ณธ์˜ ๋ฐ์ดํ„ฐ์™€ manifold ์ƒ์—์„œ ์ฐจ์ด๊ฐ€ ๋งŽ์ด ๋‚˜๊ฒŒ ๋˜๋ฉด, noise๋ฅผ ์ถ”๊ฐ€ํ•œ ์˜๋ฏธ๊ฐ€ ํ‡ด์ƒ‰๋˜๊ธฐ ๋•Œ๋ฌธ์— ์ผ์ • noise ์ด์ƒ์—์„œ๋Š” error๊ฐ€ ๋‹ค์‹œ ๋†’์•„์ง€๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/dae-graph.png" description="noise-error graph" %} + +*** +## CAE(Cntractive AE) +### Stochastic Contractive AE(SCAE) +DAE์—์„œ loss์˜ ์˜๋ฏธ๋ฅผ ํ•ด์„ํ•˜์—ฌ ๋‹ค๋ฅด๊ฒŒ ํ‘œํ˜„ํ•œ ๊ฒƒ์ด๋‹ค. 
DAE์˜ loss๋ฅผ ์ƒ๊ฐํ•ด ๋ณด๋ฉด $g$, $h$์ค‘ $h$๋Š” ํŠนํžˆ ๋ฐ์ดํ„ฐ๊ฐ€ ์กฐ๊ธˆ๋งŒ ๋ฐ”๋€Œ1์–ด๋„ manifold ์ƒ์—์„œ ๊ฐ™์€ smaple๋กœ ๋งค์นญ์ด ๋˜๊ฒŒ ํ•™์Šต์„ ํ•ด์•ผ ํ•œ๋‹ค๊ณ  ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +์ด ์˜๋ฏธ๋ฅผ ์žฌ๊ตฌ์„ฑ ํ•ด๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ ์„ ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/scae.png){: width="80%"} + +reconstruction error๋ฅผ ํ†ตํ•ด ์›๋ž˜ ์˜คํ† ์ธ์ฝ”๋”์˜ ํ˜•ํƒœ๋Œ€๋กœ ์ž…์ถœ๋ ฅ์ด ๋™์ผํ•˜๊ฒŒ ํ•™์Šตํ•˜๊ณ , **stochastic regularization**ํ•ญ์„ ํ†ตํ•ด manifold ์ƒ์—์„œ ๊ฑฐ๋ฆฌ๊ฐ€ ๊ฐ™๊ฒŒ ํ•™์Šตํ•˜๋„๋ก loss๋ฅผ ๊ตฌ์„ฑํ•˜์˜€๋‹ค. + +### CAE +SCAE์˜ stochastic regularization ํ•ญ์„ ํ…Œ์ผ๋Ÿฌ ์ „๊ฐœ๋ฅผ ํ†ตํ•ด ๊ทผ์‚ฌํ•˜์—ฌ, Frobenius norm ํ˜•ํƒœ๋กœ ๋งŒ๋“ค์–ด, analytic regularization๋กœ ์ ์šฉํ•œ ๊ฒƒ์ด CAE์ด๋‹ค. +ํ•˜์ง€๋งŒ 1์ฐจ ๋ฏธ๋ถ„ ํ•ญ์—์„œ ๋Š์–ด ๊ทผ์‚ฌํ•œ ๊ฒƒ์ด๋ฏ€๋กœ ํ•œ ์ง€์ ์˜ ๊ทผ์ฒ˜์—์„œ๋งŒ ์œ ์˜๋ฏธํ•œ ๊ทผ์‚ฌ๊ฐ€ ๋œ๋‹ค๊ณ  ํ•  ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/cae-regular.png){: width="80%"} + +์œ„ ์‹์œผ๋กœ regularization ํ•ญ์„ ๋Œ€์ฒดํ•˜์—ฌ ๋‹ค์Œ๊ณผ ๊ฐ™์ด CAE loss๋ฅผ ์ ์šฉํ•œ๋‹ค. + +![](/posting_imgs/cae.png){: width="80%"} + +#### DAE vs CAE +๊ฒฐ๊ตญ DAE์™€ CAE์˜ concept ์ž์ฒด๋Š” mainfold ์œ„์˜ ๊ฑฐ๋ฆฌ๋ฅผ ์ค„์ธ๋‹ค๋Š” ๊ฒƒ์œผ๋กœ ๊ฐ™์ง€๋งŒ, ๊ทธ๊ฒƒ์„ ์ ์šฉํ•˜๋Š” ๋ฐฉ์‹์ด ๋‹ฌ๋ž๋‹ค. + +DAE์˜ ๊ฒฝ์šฐ noise๊ฐ€ ์ฒจ๊ฐ€๋œ input์œผ๋กœ ํ•™์Šตํ•˜์—ฌ manifold ์ƒ์˜ ๊ฑฐ๋ฆฌ๋ฅผ ์ขํžˆ๋ ค ํ–ˆ๋‹ค๋ฉด, CAE๋Š” regularization์„ ํ†ตํ•ด ์ด๋ฅผ ํ•ด๊ฒฐํ•˜๋ ค ํ–ˆ๋‹ค. + +์‹ค์ œ๋กœ๋Š” CAE๋ณด๋‹ค DAE๊ฐ€ ๋” ๋งŽ์ด ์“ฐ์ธ๋‹ค๊ณ  ํ•œ๋‹ค. ์ฐธ๊ณ ๋กœ **AE, DAE, VAE(Variational AE)**๊ฐ€ ๋งŽ์ด ์“ฐ์ด๋Š” ์˜คํ† ์ธ์ฝ”๋” ์ข…๋ฅ˜๋ผ๊ณ  ํ•œ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-05-07-dlZeroToAll-PyTorch-7-1.markdown b/_posts/2022-05-07-dlZeroToAll-PyTorch-7-1.markdown new file mode 100644 index 00000000000..ce8be0ce66b --- /dev/null +++ b/_posts/2022-05-07-dlZeroToAll-PyTorch-7-1.markdown @@ -0,0 +1,334 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab7-1: Tips" +author: Kwon +date: 2022-05-07T23:00:00 +0900 +categories: [pytorch, study] +tags: [mle, overfitting, regularization, learning-rate] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab7-1: Tips ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Maximum Likelihood Estimate(MLE) + +### Probalility vs Likelihood + +Probalility(ํ™•๋ฅ )๋Š” ์šฐ๋ฆฌ๊ฐ€ ์ž˜ ์•Œ๊ณ  ์žˆ๋“ฏ์ด ์–ด๋–ค ๊ด€์ธก๊ฐ’์ด ๋ฐœ์ƒํ•  ์ •๋„๋ฅผ ๋œปํ•˜๋Š”๋ฐ, ์ด๋Š” ๋‹ค๋ฅด๊ฒŒ ๋งํ•˜๋ฉด ํ•œ ํ™•๋ฅ ๋ถ„ํฌ์—์„œ ํ•ด๋‹น ๊ด€์ธก๊ฐ’ ๋˜๋Š” ๊ด€์ธก ๊ตฌ๊ฐ„์ด ์–ผ๋งˆ์˜ ํ™•๋ฅ ์„ ๊ฐ€์ง€๋Š”๊ฐ€๋ฅผ ๋œปํ•œ๋‹ค. ์ด์— ๋ฐ˜ํ•ด Likelihood(์šฐ๋„, ๊ฐ€๋Šฅ๋„)๋Š” ์ด ๊ด€์ธก๊ฐ’์ด ์ฃผ์–ด์ง„ ํ™•๋ฅ  ๋ถ„ํฌ์—์„œ ํ™•๋ฅ ์ด ์–ผ๋งˆ๋‚˜ ๋˜๋Š”์ง€๋ฅผ ๋งํ•œ๋‹ค. + +์ค‘์š”ํ•œ ์ฐจ์ด์ ์€ **ํ™•๋ฅ **์€ ์ด๋ฏธ ๋ฐฐ๊ฒฝ์ด ๋˜๋Š” ํ™•๋ฅ  ๋ถ„ํฌ๊ฐ€ **๊ณ ์ •**๋˜์–ด ์žˆ๊ณ , **์šฐ๋„**๋Š” **๊ณ ์ •๋˜์–ด ์žˆ์ง€ ์•Š๋‹ค**๋Š” ๊ฒƒ์ด๋‹ค. + +### MLE + +์ด๋Ÿฐ ์˜๋ฏธ๋ฅผ ๊ฐ€์ง„ ์šฐ๋„๋ฅผ ์ตœ๋Œ€ํ™” ํ•œ๋‹ค๋Š” ๊ฒƒ์€ ๊ด€์ธก๋œ ๊ฒฐ๊ณผ์— ๋งž๋Š” ํ™•๋ฅ  ๋ถ„ํฌ๋ฅผ ์ฐพ์•„๋‚ธ๋‹ค๋Š” ๊ฒƒ์œผ๋กœ ์ƒ๊ฐํ•  ์ˆ˜ ์žˆ๋‹ค. + +๋ฒ ๋ฅด๋ˆ„์ด ๋ถ„ํฌ๋ฅผ ์˜ˆ๋กœ ๋“ค๋ฉด ํ™•๋ฅ ๋ถ„ํฌ๋ฅผ ๊ฒฐ์ •ํ•˜๋Š” ์•„๋ž˜์™€ ๊ฐ™์€ ์ƒํ™ฉ์—์„œ $\theta$๋ฅผ ๋ณ€ํ™”์‹œํ‚ค๋ฉด์„œ ์ฃผ์–ด์ง„ ๊ฐ’์— ๋งž์ถฐ ํ™•๋ฅ  ๋ถ„ํฌ๋ฅผ ์ตœ์ ํ™” ํ•˜๋Š” ๊ฒƒ์ด๋‹ค. + +![](/posting_imgs/lab7-1-1.jpg){: width="40%"} + +์ตœ์ ํ™” ํ•˜๋Š” ๊ณผ์ •์€ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ gradient descent(ascent)๋ฅผ ํ†ตํ•ด ์ง„ํ–‰ํ•œ๋‹ค. + +*** + +## Overfitting + +์•„๋ž˜์™€ ๊ฐ™์ด Train set์— ๋Œ€ํ•ด ๊ณผํ•˜๊ฒŒ ๋งž์ถฐ ํ•™์Šตํ•œ ๊ฒƒ์„ ๋งํ•œ๋‹ค. 
์ด ๊ฒฝ์šฐ ํ•™์Šตํ•  ๋•Œ ์‚ฌ์šฉํ•œ ๋ฐ์ดํ„ฐ ์…‹์ด ์•„๋‹Œ ๋ฐ์ดํ„ฐ๊ฐ€ ์ฃผ์–ด์กŒ์„ ๋•Œ์˜ ์˜ˆ์ธก ์„ฑ๋Šฅ์ด ๊ธ‰๊ฒฉํ•˜๊ฒŒ ๋–จ์–ด์ง€๊ฒŒ ๋œ๋‹ค. + +![](/posting_imgs/lab7-1-2.jpg){: width="60%"} + +์ด๋Ÿฌํ•œ overfitting(๊ณผ์ ํ•ฉ)์„ ๋ง‰๊ธฐ ์œ„ํ•ด test set๊ณผ vaild set์„ ์‚ฌ์šฉํ•˜๊ฒŒ ๋œ๋‹ค. ํ•™์Šต์„ ํ•  ๋•Œ ํ•œ ๋ฒˆ๋„ ๋ณด์ง€ ๋ชปํ–ˆ๋˜ set์„ ์ด์šฉํ•˜์—ฌ ๊ณผ์ ํ•ฉ ์—ฌ๋ถ€๋ฅผ ํ™•์ธํ•˜๋Š” ๊ฒƒ์ด๋‹ค. + +![](/posting_imgs/lab7-1-3.jpg){: width="60%"} + +์œ„์™€ ๊ฐ™์ด train set์— ๋Œ€ํ•œ loss๋Š” ๊ฐ์†Œํ•˜์ง€๋งŒ valid set์— ๋Œ€ํ•œ loss๊ฐ€ ๊ฐ์†Œํ•˜์ง€ ์•Š์„ ๋•Œ ๊ณผ์ ํ•ฉ์ด ๋ฐœ์ƒํ•œ๋‹ค๊ณ  ํŒ๋‹จํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Regularization + +๊ณผ์ ํ•ฉ์„ ๋ง‰๊ธฐ ์œ„ํ•œ ๋ฐฉ๋ฒ•์—๋Š” ๋” ๋งŽ์€ ๋ฐ์ดํ„ฐ๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค๊ฑฐ๋‚˜, ๋” ์ ์€ feature๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๋ฐฉ๋ฒ• ๋“ฑ ์—ฌ๋Ÿฌ ๋ฐฉ๋ฒ•์ด ์žˆ๊ณ , ๊ทธ ์ค‘ ํ•˜๋‚˜๊ฐ€ regularization์ด๋‹ค. + +**Regularization**์—๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๋ฐฉ๋ฒ•๋“ค์ด ์žˆ๋‹ค. + +* Early Stoping: valid set์˜ loss๊ฐ€ ์ค„์–ด๋“ค์ง€ ์•Š์„ ๋•Œ ํ•™์Šต์„ ์ค‘์ง€ํ•œ๋‹ค. +* Reducing Network Size +* Weight Decay: weight๊ฐ€ ๋„ˆ๋ฌด ์ปค์ง€์ง€ ์•Š๋„๋ก wight๊ฐ€ ์ปค์งˆ์ˆ˜๋ก ํ•จ๊ป˜ ์ปค์ง€๋Š” penalty๋ฅผ ๋ถ€์—ฌํ•œ๋‹ค. +* Dropout: node์˜ ์ผ๋ถ€๋ฅผ ๊บผ์„œ ํ•™์Šตํ•˜๋Š” node๋ฅผ ์ค„์ธ๋‹ค. +* Batch Normalization: ํ•™์Šต ์ค‘์— ๋ฐฐ์น˜ ๋‹จ์œ„๋กœ ์ •๊ทœํ™” ํ•˜๋Š” ๊ฒƒ + +*** + +## with Code + +### Import +```python +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim + +# ์‹œ๋“œ ๊ณ ์ • + torch.manual_seed(1) +``` + +### Data + +3์ฐจ์›์˜ input๊ณผ 3๊ฐœ์˜ class๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ๋Š” label๋กœ ํ•™์Šต์„ ์ง„ํ–‰ํ•˜๋ฉฐ, train set๊ณผ test set์˜ ๋น„์œจ์€ 8:3์ด๋‹ค. + +```python +# train set +x_train = torch.FloatTensor([[1, 2, 1], + [1, 3, 2], + [1, 3, 4], + [1, 5, 5], + [1, 7, 5], + [1, 2, 5], + [1, 6, 6], + [1, 7, 7] + ]) +y_train = torch.LongTensor([2, 2, 2, 1, 1, 1, 0, 0]) + +# test set +x_test = torch.FloatTensor([[2, 1, 1], [3, 1, 2], [3, 3, 4]]) +y_test = torch.LongTensor([2, 2, 2]) +``` + +### Model + +[lab6](/posts/dlZeroToAll-PyTorch-6/)์—์„œ ๋‹ค๋ค˜๋˜ softamx model์„ ์‚ฌ์šฉํ•˜์—ฌ ํ•™์Šต์„ ์ง„ํ–‰ํ•œ๋‹ค. + +๋ฌผ๋ก  ์ž…์ถœ๋ ฅ ์ฐจ์›์€ ๋งž์ถฐ์ค˜์•ผ ํ•˜๊ธฐ ๋•Œ๋ฌธ์— `self.linear = nn.Linear(3, 3)`๋กœ ์„ ํ˜• ๋ชจ๋ธ์„ ์ •์˜ํ•˜๋Š” ๋ถ€๋ถ„๋งŒ ๋‹ฌ๋ผ์กŒ๋‹ค. + +```python +class SoftmaxClassifierModel(nn.Module): + def __init__(self): + super().__init__() + self.linear = nn.Linear(3, 3) + def forward(self, x): + return self.linear(x) + +model = SoftmaxClassifierModel() +``` + +### Train + +์ด ๋ถ€๋ถ„ ์—ญ์‹œ ๊ธฐ์กด์˜ ํ‹€์„ ๋ฒ—์–ด๋‚˜์ง€ ์•Š๋Š”๋‹ค. 
+ +```python +# optimizer ์„ค์ • +optimizer = optim.SGD(model.parameters(), lr=0.1) + +def train(model, optimizer, x_train, y_train): + nb_epochs = 20 + for epoch in range(nb_epochs): + + # H(x) ๊ณ„์‚ฐ + prediction = model(x_train) + + # cost ๊ณ„์‚ฐ + cost = F.cross_entropy(prediction, y_train) + + # cost๋กœ H(x) ๊ฐœ์„  + optimizer.zero_grad() + cost.backward() + optimizer.step() + + print('Epoch {:4d}/{} Cost: {:.6f}'.format( + epoch, nb_epochs, cost.item() + )) + +def test(model, optimizer, x_test, y_test): + prediction = model(x_test) + predicted_classes = prediction.max(1)[1] + correct_count = (predicted_classes == y_test).sum().item() + cost = F.cross_entropy(prediction, y_test) + + print('Accuracy: {}% Cost: {:.6f}'.format( + correct_count / len(y_test) * 100, cost.item() + )) + +train(model, optimizer, x_train, y_train) + +test(model, optimizer, x_test, y_test) + +'''output +Epoch 0/20 Cost: 2.203667 +Epoch 1/20 Cost: 1.199645 +Epoch 2/20 Cost: 1.142985 +Epoch 3/20 Cost: 1.117769 +Epoch 4/20 Cost: 1.100901 +Epoch 5/20 Cost: 1.089523 +Epoch 6/20 Cost: 1.079872 +Epoch 7/20 Cost: 1.071320 +Epoch 8/20 Cost: 1.063325 +Epoch 9/20 Cost: 1.055720 +Epoch 10/20 Cost: 1.048378 +Epoch 11/20 Cost: 1.041245 +Epoch 12/20 Cost: 1.034285 +Epoch 13/20 Cost: 1.027478 +Epoch 14/20 Cost: 1.020813 +Epoch 15/20 Cost: 1.014279 +Epoch 16/20 Cost: 1.007872 +Epoch 17/20 Cost: 1.001586 +Epoch 18/20 Cost: 0.995419 +Epoch 19/20 Cost: 0.989365 + +Accuracy: 0.0% Cost: 1.425844 +''' +``` + +์ž„์˜๋กœ ๋„ฃ์€ ๊ฐ’๋“ค์ด๋ผ ์˜ˆ์ธก์˜ ์˜๋ฏธ๋Š” ํฌ๊ฒŒ ์—†์–ด ๋ณด์ธ๋‹ค. ๋ชจ๋ธ์„ ์–ด๋–ป๊ฒŒ ํ•™์Šตํ•˜๋Š”์ง€์— ๋Œ€ํ•œ ์ฝ”๋“œ๋งŒ ์ฐธ๊ณ ํ•˜๋ฉด ๋  ๊ฒƒ ๊ฐ™๋‹ค. + +*** + +## Learning Rate + +\\[ W := W - \alpha \nabla W \,\,\left(\alpha = learning\,\,rate\right)\\] + +์œ„์™€ ๊ฐ™์€ gradient descent ์‹์—์„œ $\alpha$๊ฐ€ learning rate(ํ•™์Šต๋ฅ )์ด๋‹ค. ํ•™์Šตํ•˜๋Š” ๋น„์œจ์ด๋ผ๊ณ  ๊ฐ„๋‹จํ•˜๊ฒŒ ์–ธ๊ธ‰ํ•˜๊ณ  ๋„˜์–ด๊ฐ”์—ˆ๋Š”๋ฐ, ์ด๋ฒˆ์—๋Š” ์–ด๋–ค ์˜ํ–ฅ์„ ๋ฏธ์น˜๋Š”์ง€ ํ™•์ธํ•ด ๋ณด์ž. + +```python +model = SoftmaxClassifierModel() +optimizer = optim.SGD(model.parameters(), lr=1e5) +train(model, optimizer, x_train, y_train) + +'''output +Epoch 0/20 Cost: 1.280268 +Epoch 1/20 Cost: 976950.812500 +Epoch 2/20 Cost: 1279135.125000 +Epoch 3/20 Cost: 1198379.000000 +Epoch 4/20 Cost: 1098825.875000 +Epoch 5/20 Cost: 1968197.625000 +Epoch 6/20 Cost: 284763.250000 +Epoch 7/20 Cost: 1532260.125000 +Epoch 8/20 Cost: 1651504.000000 +Epoch 9/20 Cost: 521878.500000 +Epoch 10/20 Cost: 1397263.250000 +Epoch 11/20 Cost: 750986.250000 +Epoch 12/20 Cost: 918691.500000 +Epoch 13/20 Cost: 1487888.250000 +Epoch 14/20 Cost: 1582260.125000 +Epoch 15/20 Cost: 685818.062500 +Epoch 16/20 Cost: 1140048.750000 +Epoch 17/20 Cost: 940566.500000 +Epoch 18/20 Cost: 931638.250000 +Epoch 19/20 Cost: 1971322.625000 +''' +``` + +ํ•™์Šต๋ฅ ์ด ๋„ˆ๋ฌด ํฐ ๊ฒฝ์šฐ์ด๋‹ค. ํ•™์Šต๋ฅ ์ด ํฌ๋ฉด ํ•œ๋ฒˆ์— ํ•™์Šตํ•˜๋ ค๋Š” ์ •๋„๊ฐ€ ์ปค์ง€๋Š”๋ฐ ์ด ์ •๋„๊ฐ€ ๋„ˆ๋ฌด ํฌ๊ฒŒ ๋˜๋ฉด, ์ ์  gradient๊ฐ€ ์ปค์ง€๋ฉด์„œ ๋ฐœ์‚ฐ(overshooting)ํ•˜๊ฒŒ ๋œ๋‹ค. + +์œ„ ์ฝ”๋“œ์˜ ๊ฒฐ๊ณผ๋ฅผ ๋ณด๋ฉด cost๊ฐ€ ํ•™์Šตํ•  ์ˆ˜๋ก ์ปค์ง€๊ณ  ์žˆ๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. 
+ +```python +model = SoftmaxClassifierModel() +optimizer = optim.SGD(model.parameters(), lr=1e-10) +train(model, optimizer, x_train, y_train) + +'''output +Epoch 0/20 Cost: 3.187324 +Epoch 1/20 Cost: 3.187324 +Epoch 2/20 Cost: 3.187324 +Epoch 3/20 Cost: 3.187324 +Epoch 4/20 Cost: 3.187324 +Epoch 5/20 Cost: 3.187324 +Epoch 6/20 Cost: 3.187324 +Epoch 7/20 Cost: 3.187324 +Epoch 8/20 Cost: 3.187324 +Epoch 9/20 Cost: 3.187324 +Epoch 10/20 Cost: 3.187324 +Epoch 11/20 Cost: 3.187324 +Epoch 12/20 Cost: 3.187324 +Epoch 13/20 Cost: 3.187324 +Epoch 14/20 Cost: 3.187324 +Epoch 15/20 Cost: 3.187324 +Epoch 16/20 Cost: 3.187324 +Epoch 17/20 Cost: 3.187324 +Epoch 18/20 Cost: 3.187324 +Epoch 19/20 Cost: 3.187324 +''' +``` + +์ด๋ฒˆ์—๋Š” ํ•™์Šต๋ฅ ์ด ๋„ˆ๋ฌด ์ž‘์€ ๊ฒฝ์šฐ์ด๋‹ค. ์ด ๋•Œ๋Š” ๋ฐœ์‚ฐํ•  ํ™•๋ฅ ์€ ๊ฑฐ์˜ ์—†๊ฒ ์ง€๋งŒ ํ•™์Šต์ด ๋„ˆ๋ฌด ๋А๋ฆฌ๊ฑฐ๋‚˜ ์ง„ํ–‰์ด ๋˜์ง€ ์•Š๋Š”๋‹ค. + +์œ„ ์ฝ”๋“œ์—์„œ ๋ณด๋ฉด cost๊ฐ€ ๋ณ€ํ•˜์ง€ ์•Š๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +๊ทธ๋ž˜์„œ ์ ์ ˆํ•œ ํ•™์Šต๋ฅ ์„ ์ฐพ๊ธฐ ์œ„ํ•ด์„œ๋Š” ๋ฐœ์‚ฐํ•˜๋ฉด ์ž‘๊ฒŒ, cost๊ฐ€ ์ค„์–ด๋“ค์ง€ ์•Š์œผ๋ฉด ํฌ๊ฒŒ ์กฐ์ •ํ•˜๋Š” ๊ฒƒ์ด ๋ฐ”๋žŒ์งํ•˜๋‹ค. + +*** + +## Data Preprocessing + +ํ•™์Šต์ด ์ž˜ ๋  ์ˆ˜ ์žˆ๋„๋ก data๋ฅผ ํ•™์Šต ์ „์— ์ฒ˜๋ฆฌํ•ด ์ฃผ๋Š” ๊ฒƒ์„ Data Preprocessing(๋ฐ์ดํ„ฐ ์ „์ฒ˜๋ฆฌ)์ด๋ผ๊ณ  ํ•œ๋‹ค. + +### Standardization + +์ด๋Ÿฐ ์ „์ฒ˜๋ฆฌ ์ค‘ ํ•˜๋‚˜์ธ Standardization(์ •๊ทœํ™”)์„ ํ•œ๋ฒˆ ์•Œ์•„๋ณด์ž. ์ •๊ทœํ™”์˜ ์‹์€ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ฃผ์–ด์ง„๋‹ค. + +\\[ x'_j = \frac{x_j - \mu_j}{\sigma_j} \\] + +์—ฌ๊ธฐ์„œ $\sigma$๋Š” ํ‘œ์ค€ ๋ถ„์‚ฐ, $\mu$๋Š” ํ‰๊ท ์„ ์˜๋ฏธํ•œ๋‹ค. ์ด๋“ค์„ ์ฝ”๋“œ๋กœ ํ‘œํ˜„ํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +```python +mu = x_train.mean(dim=0) +sigma = x_train.std(dim=0) +norm_x_train = (x_train - mu) / sigma + +print(norm_x_train) + +'''output +tensor([[-1.0674, -0.3758, -0.8398], + [ 0.7418, 0.2778, 0.5863], + [ 0.3799, 0.5229, 0.3486], + [ 1.0132, 1.0948, 1.1409], + [-1.0674, -1.5197, -1.2360]]) +''' +``` + +์ด๊ฑธ ์ด์šฉํ•ด์„œ training์„ ํ•˜๋ ค๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ฒฐ๊ณผ๊ฐ€ ๋‚˜์˜จ๋‹ค. + +``` +train(model, optimizer, norm_x_train, y_train) + +'''output +Epoch 0/20 Cost: 29785.091797 +Epoch 1/20 Cost: 18906.164062 +Epoch 2/20 Cost: 12054.674805 +Epoch 3/20 Cost: 7702.029297 +Epoch 4/20 Cost: 4925.733398 +Epoch 5/20 Cost: 3151.632568 +Epoch 6/20 Cost: 2016.996094 +Epoch 7/20 Cost: 1291.051270 +Epoch 8/20 Cost: 826.505310 +Epoch 9/20 Cost: 529.207336 +Epoch 10/20 Cost: 338.934204 +Epoch 11/20 Cost: 217.153549 +Epoch 12/20 Cost: 139.206741 +Epoch 13/20 Cost: 89.313782 +Epoch 14/20 Cost: 57.375462 +Epoch 15/20 Cost: 36.928429 +Epoch 16/20 Cost: 23.835772 +Epoch 17/20 Cost: 15.450428 +Epoch 18/20 Cost: 10.077808 +Epoch 19/20 Cost: 6.633700 +''' +``` + +์ •๊ทœํ™”๊ฐ€ ํ•˜๋ ค๋Š” ํ•™์Šต์— ์ •๋ง ์ข‹์€์ง€๋Š” ํ•™์Šตํ•˜์—ฌ ๊ฒฐ๊ณผ๋ฅผ ํ™•์ธํ•˜๋Š” ๊ฒƒ์ด ๊ฐ€์žฅ ์ข‹๊ฒ ์ง€๋งŒ, ์ด๋ก ์ ์œผ๋กœ ๋ถ„์„์„ ํ•˜๋ฉด data ์ฐจ์› ๊ฐ„์˜ ๊ฐ’์˜ ์ฐจ์ด๊ฐ€ ํฌ๋‹ค๊ฑฐ๋‚˜ ํ•  ๋•Œ ์ง„ํ–‰ํ•˜๋ฉด ์ข‹์€ ํšจ๊ณผ๋ฅผ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +```python +y = torch.FloatTensor([[0.0011, 2000, 1], + [0.001, 3000, 2], + [0.0001, 3000, 4], + [0.0021, 5000, 5], + [0.0131, 7000, 5], + [0.0211, 2000, 5], + [0.1211, 6000, 6], + [0.0001, 7000, 7] + ]) +``` + +์˜ˆ๋ฅผ ๋“ค์–ด ์œ„์™€ ๊ฐ™์€ target์ด ์žˆ๋‹ค. ์ด ๋ฐ์ดํ„ฐ์˜ ๊ฐ’์„ ์ฐจ์›๋ณ„๋กœ ๋น„๊ตํ•ด ๋ณด๋ฉด, ๊ทธ ์ฐจ์ด๊ฐ€ ๋„ˆ๋ฌด ํฐ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +์ด๋ ‡๊ฒŒ ๋˜์–ด๋ฒ„๋ฆฌ๋ฉด 2๋ฒˆ์งธ ๊ฐ’(1000 ๋‹จ์œ„)์— ๋Œ€ํ•œ loss๋ฅผ ์ตœ์†Œํ™” ํ•˜๋Š” ๊ฒƒ์ด ํ›จ์”ฌ ์ด๋“์ด๋ผ๊ณ  ์ƒ๊ฐํ•˜์—ฌ ๋‹ค๋ฅธ ๊ฐ’๋“ค์— ๋Œ€ํ•œ ํ•™์Šต์ด ์ œ๋Œ€๋กœ ๋˜์ง€ ์•Š์„ ์ˆ˜ ์žˆ๋‹ค. 
+ +์ด๋•Œ ์ •๊ทœํ™”๋ฅผ ํ†ตํ•ด ์ ์ ˆํ•œ ๋ฒ”์œ„๋กœ ๊ฐ’์„ ์žก์•„์ฃผ๋ฉด ๋ชจ๋“  ๋ฐ์ดํ„ฐ๋“ค์ด ๊ท ๋“ฑํ•˜๊ฒŒ ํ•™์Šต์ด ๋  ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-05-10-dlZeroToAll-PyTorch-7-2.markdown b/_posts/2022-05-10-dlZeroToAll-PyTorch-7-2.markdown new file mode 100644 index 00000000000..dcd7ec02a41 --- /dev/null +++ b/_posts/2022-05-10-dlZeroToAll-PyTorch-7-2.markdown @@ -0,0 +1,162 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab7-2: MNIST Intoduction" +author: Kwon +date: 2022-05-10T23:00:00 +0900 +categories: [pytorch, study] +tags: [mnist] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab7-2: MNIST Intoduction ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## MNIST dataset + +MNIST ๋ฐ์ดํ„ฐ ์…‹์€ ์ˆซ์ž ์†๊ธ€์”จ๋ฅผ ๋ชจ์•„๋†“์€ ๋ฐ์ดํ„ฐ ์…‹์ด๋‹ค. ์‚ฌ๋žŒ๋“ค์ด ์ ์€ ์ˆซ์ž๋“ค์„ ์šฐ์ฒด๊ตญ์—์„œ ์ž๋™์œผ๋กœ ์ฒ˜๋ฆฌํ•˜๊ธฐ ์œ„ํ•ด ๋งŒ๋“ค์–ด์ง„ ๊ฒƒ์ด ์ด ์…‹์˜ ์‹œ์ž‘์ ์ด๋ผ๊ณ  ํ•œ๋‹ค. + +MNIST๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด 28x28 ํฌ๊ธฐ์˜ ํ”ฝ์…€, 1๊ฐœ์˜ gray channel ๊ทธ๋ฆฌ๊ณ  0 ~ 9์˜ ์ •์ˆ˜ label๋กœ ์ด๋ฃจ์–ด์ ธ ์žˆ๋‹ค. + +![](/posting_imgs/lab7-2-1.png) + +### torchvision + +minist๋Š” torchvision ๋ชจ๋“ˆ์„ ํ†ตํ•ด ๋ถˆ๋Ÿฌ์˜จ๋‹ค. torchvision์€ ์—ฌ๋Ÿฌ ๋ฐ์ดํ„ฐ ์…‹์ด๋‚˜ ์•„ํ‚คํ…์ฒ˜, ์ „์ฒ˜๋ฆฌ๋ฅผ ํ•  ์ˆ˜ ์žˆ๋Š” ๊ธฐ๋Šฅ๋“ค์„ ๋‚ด์žฅํ•˜๊ณ  ์žˆ๋Š” ๋ชจ๋“ˆ์ด๋‹ค. + +```python +import torch +import torchvision.datasets as dsets +import torchvision.transforms as transforms +import matplotlib.pyplot as plt + +# parameters +training_epochs = 15 +batch_size = 100 + +# MNIST dataset +mnist_train = dsets.MNIST(root='MNIST_data/', + train=True, # train set + transform=transforms.ToTensor(), + download=True) + +mnist_test = dsets.MNIST(root='MNIST_data/', + train=False, # test set + transform=transforms.ToTensor(), + download=True) + +# minibatch +data_loader - torch.utils.DataLoader(DataLoader=mnist_train, batch_size=batch_size, shuffle=True, drop_last=True) +``` + +mnist๋Š” 60000๊ฐœ์˜ train set๊ณผ 10000๊ฐœ์˜ test set์œผ๋กœ ๊ตฌ์„ฑ๋˜์–ด ์žˆ๊ณ , train prameter์— boolean ๊ฐ’์„ ๋„ฃ์–ด ๊ฐ ์…‹์„ ๋ถˆ๋Ÿฌ์˜ฌ ์ˆ˜ ์žˆ๋‹ค. + +๋‹ค๋ฅธ ๋ฐ์ดํ„ฐ ์…‹๋“ค๊ณผ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ `DataLoader`๋ฅผ ํ†ตํ•ด ๋ฏธ๋‹ˆ๋ฐฐ์น˜๋ฅผ ๋‚˜๋ˆ„์–ด ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Model + +```python +# MNIST data image of shape 28 * 28 = 784 +linear = torch.nn.Linear(784, 10, bias=True).to(device) + +# define cost/loss & optimizer +criterion = torch.nn.CrossEntropyLoss().to(device) +optimizer = torch.optim.SGD(linear.parameters(), lr=0.1) +``` + +๋ชจ๋ธ์€ ์„ ํ˜•๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜๋ฉฐ ์ด๋ฏธ์ง€์˜ ํฌ๊ธฐ๊ฐ€ 28x28์ด๋ฏ€๋กœ 28*28=784์˜ ์ฐจ์›์„ ๊ฐ€์ง€๋Š” ์ž…๋ ฅ์„ ๋ฐ›๋„๋ก ์ •์˜ํ•œ๋‹ค. 
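Before the training loop, it is worth making the mini-batch loader and the `device` used by the `.to(device)` calls explicit. The sketch below is a minimal version of that setup using the standard `torch.utils.data.DataLoader` API; the `device` line is my assumption, since this post does not define it.

```python
import torch
import torchvision.datasets as dsets
import torchvision.transforms as transforms

# assumed here: the device that the .to(device) calls in this post rely on
device = 'cuda' if torch.cuda.is_available() else 'cpu'

batch_size = 100

mnist_train = dsets.MNIST(root='MNIST_data/', train=True,
                          transform=transforms.ToTensor(), download=True)

# standard mini-batch loader: torch.utils.data.DataLoader with the dataset= keyword
data_loader = torch.utils.data.DataLoader(dataset=mnist_train,
                                          batch_size=batch_size,
                                          shuffle=True,
                                          drop_last=True)
```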
+ +### Train +```python +for epoch in range(training_epochs): + avg_cost = 0 + total_batch = len(data_loader) + + for X, Y in data_loader: + # reshape input image into [batch_size by 784] + # label is not one-hot encoded + X = X.view(-1, 28 * 28).to(device) + Y = Y.to(device) + + hypothesis = linear(X) + cost = criterion(hypothesis, Y) + + optimizer.zero_grad() + cost.backward() + optimizer.step() + + avg_cost += cost / total_batch + + print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost)) + +print('Learning finished') + +'''output +Epoch: 0001 cost = 0.535468459 +Epoch: 0002 cost = 0.359274179 +Epoch: 0003 cost = 0.331187516 +Epoch: 0004 cost = 0.316578031 +Epoch: 0005 cost = 0.307158142 +Epoch: 0006 cost = 0.300180674 +Epoch: 0007 cost = 0.295130163 +Epoch: 0008 cost = 0.290851504 +Epoch: 0009 cost = 0.287417084 +Epoch: 0010 cost = 0.284379542 +Epoch: 0011 cost = 0.281825215 +Epoch: 0012 cost = 0.279800713 +Epoch: 0013 cost = 0.277809024 +Epoch: 0014 cost = 0.276154280 +Epoch: 0015 cost = 0.274440825 +Learning finished +''' +``` + +[Lab4-2](/posts/dlZeroToAll-PyTorch-4_2/)์—์„œ ํ•™์Šตํ•œ ๋ฐฉ์‹๊ณผ ๊ฐ™์ด `data_loader`๋ฅผ for๋ฅผ ํ†ตํ•ด ๋ฐ˜๋ณตํ•˜๋ฉฐ ์ง„ํ–‰ํ•œ๋‹ค. + +์ด๋•Œ ๊ธฐ์กด์˜ ์ด๋ฏธ์ง€ ๋ฐ์ดํ„ฐ์˜ minibatch๋Š” `[batch_size, 1, 28, 28]`์˜ ํฌ๊ธฐ๋ฅผ ๊ฐ€์ง€๊ธฐ ๋•Œ๋ฌธ์—, ๋ชจ๋ธ์˜ ์ž…๋ ฅ์— ๋งž๊ฒŒ `[batch_size, 28*28]`๋กœ ๋ฐ”๊ฟ”์ฃผ๋Š” ๊ณผ์ •์ด ํ•„์š”ํ•˜๋‹ค. ์ด ๊ณผ์ •์„ ์œ„ํ•ด `X = X.view(-1, 28 * 28).to(device)`๋กœ ๋ฐ์ดํ„ฐ๋ฅผ ์žฌ๊ตฌ์„ฑํ•œ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +๋‚˜๋จธ์ง€๋Š” ํ•™์Šต์€ ๊ธฐ์กด์˜ ํ˜•ํƒœ์™€ ๋™์ผํ•˜๋‹ค. + +### Test + +ํ…Œ์ŠคํŠธ๋ฅผ ์ง„ํ–‰ํ•  ๋•Œ์—๋Š” ์ด๋ฏธ ํ•™์Šต๋œ ๋ชจ๋ธ์— ๋Œ€ํ•ด ํ•™์Šต์ด ์ž˜ ๋˜์—ˆ๋Š”์ง€๋ฅผ ํ™•์ธํ•˜๋Š” ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์— gradient descent๋กœ ์ธํ•œ ๊ฐ€์ค‘์น˜ ์—…๋ฐ์ดํŠธ๊ฐ€ ๋˜๋ฉด ์•ˆ๋œ๋‹ค. +๊ทธ๋ž˜์„œ `with torch.no_grad()`{:.python} ์•ˆ์—์„œ ์—…๋ฐ์ดํŠธ ๋˜๋Š” ๊ฒƒ์„ ๋ง‰์œผ๋ฉด์„œ ํ…Œ์ŠคํŠธ๋ฅผ ์ง„ํ–‰ํ•œ๋‹ค. + +```python +# Test the model using test sets +with torch.no_grad(): + X_test = mnist_test.test_data.view(-1, 28 * 28).float().to(device) + Y_test = mnist_test.test_labels.to(device) + + prediction = linear(X_test) + correct_prediction = torch.argmax(prediction, 1) == Y_test + accuracy = correct_prediction.float().mean() + print('Accuracy:', accuracy.item()) + + # Get one and predict + r = random.randint(0, len(mnist_test) - 1) + X_single_data = mnist_test.test_data[r:r + 1].view(-1, 28 * 28).float().to(device) + Y_single_data = mnist_test.test_labels[r:r + 1].to(device) + + print('Label: ', Y_single_data.item()) + single_prediction = linear(X_single_data) + print('Prediction: ', torch.argmax(single_prediction, 1).item()) + + plt.imshow(mnist_test.test_data[r:r + 1].view(28, 28), cmap='Greys', interpolation='nearest') + plt.show() + +'''output +Accuracy: 0.8862999677658081 +Label: 8 +Prediction: 3 +''' +``` + +![](/posting_imgs/lab7-2-1.png) + +ํ•™์Šตํ•œ ๋ชจ๋ธ์— test ์ž…๋ ฅ์„ ํ†ต๊ณผ์‹œ์ผœ ๋‚˜์˜จ ๊ฒฐ๊ณผ๋ฅผ `argmax`๋ฅผ ํ†ตํ•ด ๋ชจ๋ธ์ด ์˜ˆ์ธกํ•œ label์„ ๋ฝ‘์•„๋‚ผ ์ˆ˜ ์žˆ๋‹ค. +์ดํ›„ test์˜ ์‹ค์ œ label๊ณผ ๋น„๊ตํ•˜์—ฌ ByteTensor๋ฅผ ์ƒ์„ฑํ•˜๊ณ , ๊ทธ ํ‰๊ท ์„ ๊ตฌํ•ด ์ •ํ™•๋„๋ฅผ ๊ณ„์‚ฐํ•  ์ˆ˜ ์žˆ๋‹ค. + +ํ•œ ๋ฐ์ดํ„ฐ์— ๋Œ€ํ•œ ์ถœ๋ ฅ๊ฐ’์€ ์‹ถ๋‹ค๋ฉด test_data์™€ label์„ ์Šฌ๋ผ์ด์‹ฑ ํ•˜์—ฌ ๋ชจ๋ธ์— ๋„ฃ์–ด์„œ ๊ฒฐ๊ณผ๊ฐ’์„ ์ถœ๋ ฅํ•˜๋Š” ๊ฒƒ์œผ๋กœ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +๊ทธ ๋ฐ์ดํ„ฐ์— ๋Œ€ํ•œ ์ด๋ฏธ์ง€๋Š” `plt.imshow`๋ฅผ ํ†ตํ•ด ๋กน์ธํ•  ์ˆ˜ ์žˆ๋‹ค. cmap(color map)์„ grey๋กœ ์„ค์ •ํ•˜๊ณ , interpolation(๋ณด๊ฐ„)์„ nearest๋กœ ํ•˜๋ฉด mnist ์ด๋ฏธ์ง€๋ฅผ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค. 
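The argmax, compare, mean chain used for the accuracy above is easy to see in isolation. A tiny self-contained sketch with made-up logits (random numbers, not MNIST outputs) is:

```python
import torch

# fake scores for 4 samples over 10 classes, plus their true labels
logits = torch.randn(4, 10)
labels = torch.tensor([3, 7, 1, 0])

predicted = torch.argmax(logits, dim=1)   # index of the highest score in each row
correct = (predicted == labels)           # boolean mask (ByteTensor in older PyTorch)
accuracy = correct.float().mean()         # fraction of correct predictions
print(predicted.tolist(), accuracy.item())
```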
\ No newline at end of file diff --git a/_posts/2022-05-11-dlZeroToAll-PyTorch-8-1.markdown b/_posts/2022-05-11-dlZeroToAll-PyTorch-8-1.markdown new file mode 100644 index 00000000000..a5cc3c3af69 --- /dev/null +++ b/_posts/2022-05-11-dlZeroToAll-PyTorch-8-1.markdown @@ -0,0 +1,120 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab8-1: Perceptron" +author: Kwon +date: 2022-05-11T23:00:00 +0900 +categories: [pytorch, study] +tags: [perceptron] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab8-1: Perceptron ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Neuron + +๋จผ์ € ํผ์…‰ํŠธ๋ก ์˜ ์ปจ์…‰์ด ๋œ ๋‰ด๋Ÿฐ์— ๋Œ€ํ•ด ์•Œ์•„๋ณด์ž. ๋‰ด๋Ÿฐ์€ ๋™๋ฌผ์˜ ์‹ ๊ฒฝ๊ณ„๋ฅผ ๊ตฌ์„ฑํ•˜๋Š” ์„ธํฌ๋กœ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ํ˜•ํƒœ์ด๋‹ค. + +![๋™๋ฌผ์˜ ๋‰ด๋Ÿฐ](/posting_imgs/lab8-1-1.svg) + +๋‰ด๋Ÿฐ์€ ์ž๊ทน์„ ์ „๊ธฐ ์‹ ํ˜ธ๋ฅผ ์ „๋‹ฌํ•˜๋Š” ํ†ต๋กœ๋ผ๊ณ  ํ•  ์ˆ˜ ์žˆ๋Š”๋ฐ, ์ด๋•Œ ๊ฐ•๋„๊ฐ€ ์–ด๋А ์ •๋„(threshold)๋ฅผ ๋„˜์–ด์„œ๋Š” ์‹ ํ˜ธ๋งŒ์„ ์ „๋‹ฌํ•œ๋‹ค. + +*** + +## Perceptron + +ํผ์…‰ํŠธ๋ก ์€ ์ด๋Ÿฐ ๋‰ด๋Ÿฐ์˜ ์ž‘๋™ ๋ฐฉ์‹์„ ๋ฐ˜์˜ํ•˜์—ฌ ๋งŒ๋“  ์ธ๊ณต์‹ ๊ฒฝ๋ง์˜ ํ•œ ์ข…๋ฅ˜๋กœ, ๋‹ค์ˆ˜์˜ ์ž…๋ ฅ์„ ๋ฐ›์•„ ํ•˜๋‚˜์˜ ์ถœ๋ ฅ์„ ๋‚ด๋ณด๋‚ด๋Š” ๊ตฌ์กฐ์ด๋‹ค. + +![ํผ์…‰ํŠธ๋ก ์˜ ๊ตฌ์กฐ](/posting_imgs/lab8-1-2.png) + +์ฒ˜์Œ ํผ์…‰ํŠธ๋ก ์ด ๋“ฑ์žฅํ–ˆ์„ ๋•Œ๋Š” AND, OR ๋ฌธ์ œ๋ฅผ ์•„์ฃผ ์ž˜ ํ•ด๊ฒฐํ•˜์˜€๋‹ค. ๋‹ค์Œ ๊ทธ๋ž˜ํ”„๋ฅผ ๋ณด๋ฉด AND, OR ๋ฌธ์ œ ๋ชจ๋‘ ์„ ํ˜•์œผ๋กœ ์ž˜ ๋ถ„๋ฆฌ๋˜๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +![AND(์ขŒ)์™€ OR(์šฐ)์˜ ๋ถ„๋ฅ˜ ํ˜•ํƒœ](/posting_imgs/lab8-1-3.jpg) + +๊ทธ๋ž˜์„œ ๋” ๋ณต์žกํ•œ ๋ฌธ์ œ๋„ ํ’€์–ด๋‚ผ ์ˆ˜ ์žˆ์ง€ ์•Š์„๊นŒ ํ•˜๋ฉฐ ๊ธฐ๋Œ€๋ฅผ ๋ฐ›์•˜์—ˆ๋Š”๋ฐ, XOR ๋ฌธ์ œ๊ฐ€ ๋‹จ์ผ ํผ์…‰ํŠธ๋ก ์œผ๋กœ ํ•ด๊ฒฐํ•  ์ˆ˜ ์—†๋‹ค๋Š” ๊ฒƒ์ด ์ฆ๋ช…๋˜๋ฉด์„œ ํผ์…‰ํŠธ๋ก  ์—ฐ๊ตฌ์— ์•”ํ‘๊ธฐ๊ฐ€ ๋„๋ž˜ํ•˜๊ฒŒ ๋˜์—ˆ๋‹ค. + +![๋‹จ์ผ ์„ ํ˜•์œผ๋กœ ํ•ด๊ฒฐํ•  ์ˆ˜ ์—†๋Š” XOR](/posting_imgs/lab8-1-4.jpg) + +์œ„ ๊ทธ๋ฆผ์„ ๋ณด๋ฉด ์–ด๋–ค ์ง์„ ์„ ๊ทธ์–ด๋„ ํ•˜๋‚˜์˜ ์ง์„ ์œผ๋กœ๋Š” XOR ๋ฌธ์ œ๋ฅผ ๋ช…ํ™•ํ•˜๊ฒŒ ๊ฐ€๋ฅผ ์ˆ˜ ์—†๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +์‹ค์ œ๋กœ๋„ ๊ทธ๋Ÿฐ์ง€ ์ฝ”๋“œ๋กœ ํ•™์Šตํ•ด ๋ณด๋ฉด์„œ ํ•œ๋ฒˆ ํ™•์ธ ํ•ด ๋ณด์ž. + +### XOR train code with single preceptron + +๋ชจ๋ธ์€ ์„ ํ˜•์— ์‹œ๊ทธ๋ชจ์ด๋“œ๋ฅผ ํ™œ์„ฑํ™” ํ•จ์ˆ˜๋กœ ์‚ฌ์šฉํ•˜๋„๋ก ํ•˜๊ณ , ์ด์ง„๋ถ„๋ฅ˜์ด๋ฏ€๋กœ ์†์‹คํ•จ์ˆ˜๋กœ `BECLoss`๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +```python +import torch + +device = 'cuda' if torch.cuda.is_available() else 'cpu' + +# ์‹œ๋“œ ๊ณ ์ • +torch.manual_seed(777) +if device == 'cuda': + torch.cuda.manual_seed_all(777) + +X = torch.FloatTensor([[0, 0], [0, 1], [1, 0], [1, 1]]).to(device) +Y = torch.FloatTensor([[0], [1], [1], [0]]).to(device) +``` + +๋จผ์ € XOR์— ๋Œ€ํ•œ ๋ฐ์ดํ„ฐ๋ฅผ ์ •์˜ํ•ด ์ค€๋‹ค. + +```python +linear = torch.nn.Linear(2, 1, bias=True) +sigmoid = torch.nn.Sigmoid() + +# Sequential๋กœ ์—ฌ๋Ÿฌ ๋ชจ๋“ˆ์„ ๋ฌถ์–ด ํ•˜๋‚˜์˜ ๋ ˆ์ด์–ด๋กœ ์‚ฌ์šฉ +model = torch.nn.Sequential(linear, sigmoid).to(device) + +criterion = torch.nn.BCELoss().to(device) +optimizer = torch.optim.SGD(model.parameters(), lr=1) + +'''output +0 0.7273974418640137 +100 0.6931475400924683 +200 0.6931471824645996 +300 0.6931471824645996 +400 0.6931471824645996 +500 0.6931471824645996 +600 0.6931471824645996 +... +9700 0.6931471824645996 +9800 0.6931471824645996 +9900 0.6931471824645996 +10000 0.6931471824645996 +''' +``` + +100epoch ๋ถ€ํ„ฐ loss๊ฐ€ ์ž˜ ๊ฐ์†Œํ•˜์ง€ ์•Š๋”๋‹ˆ 200๋ถ€ํ„ฐ๋Š” ์•„์˜ˆ ๊ฐ์†Œํ•˜์ง€ ์•Š๋Š”๋‹ค. 
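The training loop itself is not shown in the snippet above; a sketch of the loop that produces this log, following the same pattern as the MLP training loop in the next post, is:

```python
for step in range(10001):
    hypothesis = model(X)              # forward pass through the single linear + sigmoid layer
    cost = criterion(hypothesis, Y)    # binary cross-entropy against the XOR labels

    optimizer.zero_grad()
    cost.backward()
    optimizer.step()

    if step % 100 == 0:
        print(step, cost.item())
```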
ํ™•์‹คํžˆ ํ™•์ธํ•ด ๋ณด๊ธฐ ์œ„ํ•ด ์ •ํ™•๋„๋ฅผ ์ถœ๋ ฅํ•ด ๋ณด๋ฉด + +```python +with torch.no_grad(): + hypothesis = model(X) + predicted = (hypothesis > 0.5).float() + accuracy = (predicted == Y).float().mean() + print('\nHypothesis: ', hypothesis.detach().cpu().numpy(), '\nCorrect: ', predicted.detach().cpu().numpy(), '\nAccuracy: ', accuracy.item()) + +'''output +Hypothesis: [[0.5] + [0.5] + [0.5] + [0.5]] +Correct: [[0.] + [0.] + [0.] + [0.]] +Accuracy: 0.5 +''' +``` + +๋ชจ๋“  hypothesis ๊ฒฐ๊ณผ๊ฐ€ 0.5๋กœ ๋‚˜์˜ค๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์ด์ฒ˜๋Ÿผ ๋‹จ์ผ ํผ์…‰ํŠธ๋ก ์œผ๋กœ๋Š” XOR์„ ์ œ๋Œ€๋กœ ๋ถ„๋ฅ˜ํ•  ์ˆ˜ ์—†๋‹ค. + +๋‹ค์Œ ํฌ์ŠคํŒ…์—์„œ๋Š” XOR์„ ํ•ด๊ฒฐํ•œ Multi Layer Percetron์— ๋Œ€ํ•ด ์•Œ์•„๋ณด๊ฒ ๋‹ค. + +*** + +#### Image source + +* Neuron: [https://commons.wikimedia.org/wiki/File:Neuron.svg](https://commons.wikimedia.org/wiki/File:Neuron.svg) + +* Perceptron: [https://commons.wikimedia.org/wiki/File:Rosenblattperceptron.png](https://commons.wikimedia.org/wiki/File:Rosenblattperceptron.png) \ No newline at end of file diff --git a/_posts/2022-05-12-dlZeroToAll-PyTorch-8-2.markdown b/_posts/2022-05-12-dlZeroToAll-PyTorch-8-2.markdown new file mode 100644 index 00000000000..ffd102d8e12 --- /dev/null +++ b/_posts/2022-05-12-dlZeroToAll-PyTorch-8-2.markdown @@ -0,0 +1,163 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab8-2: Multi Layer Perceptron" +author: Kwon +date: 2022-05-12T23:00:00 +0900 +categories: [pytorch, study] +tags: [perceptron] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab8-2: Multi Layer Perceptron ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## MLP(Multi Layer Perceptron) + +MLP๋Š” ๋‹จ์ผ ํผ์…‰ํŠธ๋ก ์„ ์—ฌ๋Ÿฌ๊ฐœ ์Œ“์€ ๊ฒƒ์œผ๋กœ ๋‹จ์ผ ํผ์…‰ํŠธ๋ก ์œผ๋กœ ํ•ด๊ฒฐํ•˜์ง€ ๋ชปํ•œ XOR๊ณผ ๊ฐ™์€ ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•˜๊ธฐ ์œ„ํ•ด ์ œ์•ˆ๋œ ๊ตฌ์กฐ์ด๋‹ค. + +![](/posting_imgs/lab8-2-1.jpg) + +์œ„์™€ ๊ฐ™์ด XOR ๋ฌธ์ œ๋Š” ์„ ์„ 2๊ฐœ ๊ทธ์–ด ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•  ์ˆ˜ ์žˆ๋Š”๋ฐ, ํ•œ๋™์•ˆ MLP๋ฅผ ํ˜„์‹ค์ ์œผ๋กœ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋Š” ๋ฐฉ๋ฒ•์„ ์ฐพ์ง€ ๋ชปํ•˜๋‹ค๊ฐ€ **backpropagation**์ด ๋“ฑ์žฅํ•˜๋ฉด์„œ ํž‰์Šต์ด ๊ฐ€๋Šฅํ•ด์กŒ๋‹ค. + +*** + +## Data and Model + +```python +import torch + +device = 'cuda' if torch.cuda.is_available() else 'cpu' + +# ์‹œ๋“œ ๊ณ ์ • +torch.manual_seed(777) +if device == 'cuda': + torch.cuda.manual_seed_all(777) + +X = torch.FloatTensor([[0, 0], [0, 1], [1, 0], [1, 1]]).to(device) +Y = torch.FloatTensor([[0], [1], [1], [0]]).to(device) +``` + +์œ„์™€ ๊ฐ™์ด ๋ฐ์ดํ„ฐ๋ฅผ XOR์— ๋งž๊ฒŒ ๋งŒ๋“ค์–ด ์ฃผ๊ณ , ๋ชจ๋ธ๋„ ์ƒ์„ฑํ•ด ์ค€๋‹ค. ์ด๋ฒˆ์—๋Š” MLP๋กœ ํ•™์Šตํ•  ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์— ์„ ํ˜• ๋ ˆ์ด์–ด 2๊ฐœ๋ฅผ ๋งŒ๋“ค์–ด `Sequential`๋กœ ๋ฌถ์–ด ์ค€๋‹ค. +loss๋Š” ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ์ด์ง„๋ถ„๋ฅ˜์ด๋ฏ€๋กœ `BCELoss`๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +```python +linear1 = torch.nn.Linear(2, 2, bias=True) +linear2 = torch.nn.Linear(2, 1, bias=True) +sigmoid = torch.nn.Sigmoid() + +model = torch.nn.Sequential(linear1, sigmoid, linear2, sigmoid).to(device) + +criterion = torch.nn.BCELoss().to(device) +optimizer = torch.optim.SGD(model.parameters(), lr=1) +``` + +*** + +## Train + +ํ•™์Šต ์ฝ”๋“œ์˜ ํ˜•ํƒœ๋Š” ๋™์ผํ•˜๋‹ค. 
+ +```python +for step in range(10001): + hypothesis = model(X) + + # cost/loss function + cost = criterion(hypothesis, Y) + + optimizer.zero_grad() + cost.backward() + optimizer.step() + + if step % 100 == 0: + print(step, cost.item()) + +'''output +0 0.7434073090553284 +100 0.6931650638580322 +200 0.6931577920913696 +300 0.6931517124176025 +400 0.6931463479995728 +500 0.6931411027908325 +600 0.693135678768158 +700 0.6931295394897461 +800 0.693122148513794 +900 0.6931126713752747 +1000 0.6930999755859375 +... +9700 0.001285637030377984 +9800 0.0012681199004873633 +9900 0.0012511102249845862 +10000 0.0012345188297331333 +''' +``` + +์กฐ๊ธˆ์”ฉ์ด์ง€๋งŒ ํ™•์‹คํžˆ loss๊ฐ€ ์ค„์–ด๋“œ๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ์ •ํ™•๋„๋„ ํ•œ๋ฒˆ ์ถœ๋ ฅํ•ด ๋ณด์ž. + +```python +with torch.no_grad(): + hypothesis = model(X) + predicted = (hypothesis > 0.5).float() + accuracy = (predicted == Y).float().mean() + print('\nHypothesis: ', hypothesis.detach().cpu().numpy(), '\nCorrect: ', predicted.detach().cpu().numpy(), '\nAccuracy: ', accuracy.item()) + +'''output + +Hypothesis: [[0.00106364] + [0.99889404] + [0.99889404] + [0.00165861]] +Correct: [[0.] + [1.] + [1.] + [0.]] +Accuracy: 1.0 +''' +``` + +XOR์„ ์•„์ฃผ ์ž˜ ๋ถ„๋ฅ˜ํ•ด ์ค€๋‹ค. ์ด๋ฒˆ ํ•™์Šต์—์„œ๋Š” 2๊ฐœ์˜ ๋ ˆ์ด์–ด๋งŒ์„ ์Œ“์•˜์ง€๋งŒ ์—ฌ๋Ÿฌ๊ฐœ์˜ ๋ ˆ์ด์–ด, ๋˜๋Š” ๋” ๋…ธ๋“œ๊ฐ€ ๋งŽ์€ ๋ ˆ์ด์–ด๋„ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +### NN Wide Deep + +```python +linear1 = torch.nn.Linear(2, 10, bias=True) +linear2 = torch.nn.Linear(10, 10, bias=True) +linear3 = torch.nn.Linear(10, 10, bias=True) +linear4 = torch.nn.Linear(10, 1, bias=True) +sigmoid = torch.nn.Sigmoid() + +model = torch.nn.Sequential(linear1, sigmoid, linear2, sigmoid, linear3, sigmoid, linear4, sigmoid).to(device) + +for step in range(10001): + optimizer.zero_grad() + hypothesis = model(X) + + # cost/loss function + cost = criterion(hypothesis, Y) + cost.backward() + optimizer.step() + + if step % 100 == 0: + print(step, cost.item()) + +'''output +0 0.6948983669281006 +100 0.6931558847427368 +200 0.6931535005569458 +300 0.6931513547897339 +400 0.6931493282318115 +500 0.6931473016738892 +600 0.6931453943252563 +700 0.6931434869766235 +800 0.6931416988372803 +900 0.6931397914886475 +1000 0.6931380033493042 +... +9700 0.00016829342348501086 +9800 0.00016415018762927502 +9900 0.00016021561168599874 +10000 0.0001565046259202063 +''' +``` + +2๊ฐœ์˜ ๋ ˆ์ด์–ด๋ฅผ ์Œ“์•˜์„ ๋•Œ(0.0012345188297331333)๋ณด๋‹ค loss๊ฐ€ ๋” ๋‚ฎ์•„์ง„ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-05-13-backpropagation.markdown b/_posts/2022-05-13-backpropagation.markdown new file mode 100644 index 00000000000..e4c263a4bb2 --- /dev/null +++ b/_posts/2022-05-13-backpropagation.markdown @@ -0,0 +1,54 @@ +--- +title: "Backpropagation" +author: Kwon +date: 2022-05-13T00:00:00 +0900 +categories: [background] +tags: [backpropagation] +math: true +mermaid: false +--- + +*** + +## Backpropagation + +๋ฐ์ดํ„ฐ๋ฅผ ๋ ˆ์ด์–ด์˜ ๋…ธ๋“œ๋“ค์„ ํ†ต๊ณผ์‹œํ‚ค๋ฉด์„œ ์„ค์ •๋œ weight์— ๋”ฐ๋ผ ์˜ˆ์ธก ๊ฒฐ๊ณผ๊ฐ’์„ ๊ณ„์‚ฐํ•˜๋Š” ๊ฒƒ์„ **forward pass**๋ผ๊ณ  ํ•œ๋‹ค. + +![Forward Pass](/posting_imgs/forward.png) + +MLP์˜ ๊ฒฐ๊ณผ๊ฐ’์„ ๋งŒ๋“ค์–ด ์ฃผ๋Š” ํ•„์ˆ˜์ ์ธ ๊ณผ์ •์ด์ง€๋งŒ ์ด๊ฒƒ๋งŒ์œผ๋กœ๋Š” weight๋ฅผ ๊ฐœ์„ ํ•˜๋ฉด์„œ ํ•™์Šต์„ ์ง„ํ–‰ํ•  ์ˆ˜๊ฐ€ ์—†๋‹ค. + +์ด๊ฒƒ์„ ๊ฐ€๋Šฅํ•˜๊ฒŒ ํ•ด์ค€ ๊ฒƒ์ด ๋ฐ”๋กœ backpropagation(์˜ค์ฐจ ์—ญ์ „ํŒŒ)์ด๋‹ค. + +์—ญ์ „ํŒŒ ์•Œ๊ณ ๋ฆฌ์ฆ˜์€ chain rule์„ ํ†ตํ•ด ๊ฐ ๋…ธ๋“œ๋“ค์— prediction๊ณผ target์˜ total error๋ฅผ ์ „๋‹ฌํ•˜์—ฌ $\nabla W$๋ฅผ ๊ณ„์‚ฐํ•  ์ˆ˜ ์žˆ๊ฒŒ ํ•ด ์ค€๋‹ค. 
+์˜ค์ฐจ ์—ญ์ „ํŒŒ๋ผ๋Š” ์ด๋ฆ„์ด ๋ถ™์€ ๊ฒƒ๋„ ์ด ๋•Œ๋ฌธ์ด๋‹ค. + +์ด๋ ‡๊ฒŒ ๊ณ„์‚ฐํ•œ $\nabla W$๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋‹ค์Œ๊ณผ ๊ฐ™์ด weight๋ฅผ ๊ฐœ์„ ํ•œ๋‹ค. + +\\[W:=W-\alpha\nabla W\\] + +์ž์„ธํ•œ ๋‚ด์šฉ์€ [๊ด€๋ จ ํฌ์ŠคํŒ…](/posts//dlZeroToAll-PyTorch-3/)์„ ์ฐธ๊ณ ํ•˜์ž + +์œ„์—์„œ ๋‚˜์™”๋˜ ๋„คํŠธ์›Œํฌ์˜ ์ผ๋ถ€๋ถ„์„ ๋–ผ์„œ ์—ญ์ „ํŒŒ๊ฐ€ ์–ด๋–ป๊ฒŒ ์ ์šฉ๋˜๋Š”์ง€ ํ™•์ธํ•ด ๋ณด์ž. + +![Backpropagation ๊ณผ์ •](/posting_imgs/backward.png) + +๋จผ์ € ์ตœ์ข… output์— ๊ฐ€์žฅ ๊ฐ€๊นŒ์šด $\nabla W_3$๋ถ€ํ„ฐ ๊ณ„์‚ฐํ•œ๋‹ค. + +\\[\nabla w_3=\frac{\partial cost}{\partial W_3}=\frac{\partial cost}{\partial o_1}\frac{\partial o_1}{\partial y_1}\frac{\partial y_1}{\partial W_3}\\] + +$\nabla W_3$๋Š” chain rule์„ ํ†ตํ•ด ์œ„์™€ ๊ฐ™์ด ๋ฏธ๋ถ„์ด ๋ฐ”๋กœ ๋˜๋Š” ํ˜•์‹์œผ๋กœ ํ‘œํ˜„ํ•  ์ˆ˜ ์žˆ๋‹ค. ํ•œ ๋ฒˆ ๋” ๊ฑฐ์Šฌ๋Ÿฌ ์˜ฌ๋ผ๊ฐ€ ๋ณด์ž. + +์ด๋ฒˆ์—๋Š” $\nabla W_1$๊ณผ $\nabla W_2$๋ฅผ ๊ตฌํ•  ์ฐจ๋ก€์ด๋‹ค. ๋จผ์ € $\nabla X_1$์„ ๊ตฌํ•ด๋ณด์ž. + +\\[\frac{\partial cost}{\partial W_1}=\frac{\partial cost}{\partial y_1}\frac{\partial y_1}{\partial h_{z_2}}\frac{\partial h_{z_2}}{\partial z_2}\frac{\partial z_2}{\partial W_1}\\] + +๋งˆ์ฐฌ๊ฐ€์ง€๋กœ chain rule์„ ํ†ตํ•ด ๊ณ„์‚ฐํ•œ๋‹ค. $\frac{\partial cost}{\partial y_1}$์€ $\nabla X_3$์„ ๊ตฌํ•  ๋•Œ ๊ตฌํ–ˆ์—ˆ๊ธฐ ๋•Œ๋ฌธ์— ๋ชจ๋“  ํŽธ๋ฏธ๋ถ„์ด ๊ณ„์‚ฐ ๊ฐ€๋Šฅํ•˜๋‹ค. + +$\nabla W_2$๋Š” ์—ฌ๊ธฐ์„œ $W_1$์„ $W_2๋กœ ๋ฐ”๊ฟ”์ฃผ๊ธฐ๋งŒ ํ•˜๋ฉด ๋œ๋‹ค.$ + +\\[\frac{\partial cost}{\partial W_2}=\frac{\partial cost}{\partial y_1}\frac{\partial y_1}{\partial h_{z_2}}\frac{\partial h_{z_2}}{\partial z_2}\frac{\partial z_2}{\partial W_2}\\] + +์ตœ์ข… output์ด ์—†์–ด๋„ forward ๋„์ค‘์— ๊ณ„์‚ฐํ•  ์ˆ˜ ์žˆ๋Š” ํŽธ๋ฏธ๋ถ„๋“ค์€ forward pass๋ฅผ ํ•˜๋ฉด์„œ ๋ฏธ๋ฆฌ ๊ณ„์‚ฐํ•˜์—ฌ ์ €์žฅํ•ด ๋‘๊ณ  ์‚ฌ์šฉํ•œ๋‹ค. + +์ด๋ฒˆ ์˜ˆ์ œ์—์„œ๋Š” ์•„์ฃผ ๊ฐ„๋‹จํ•œ ๋„คํŠธ์›Œํฌ์— ๋Œ€ํ•ด ๋‹ค๋ค˜์ง€๋งŒ ๋” ๊นŠ๊ณ  ๋„“์€ ๋„คํŠธ์›Œํฌ์— ๋Œ€ํ•ด์„œ๋„ ๋˜‘๊ฐ™์ด ์ ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-05-14-dlZeroToAll-PyTorch-9-1.markdown b/_posts/2022-05-14-dlZeroToAll-PyTorch-9-1.markdown new file mode 100644 index 00000000000..a8546f869f4 --- /dev/null +++ b/_posts/2022-05-14-dlZeroToAll-PyTorch-9-1.markdown @@ -0,0 +1,189 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab9-1:ReLU" +author: Kwon +date: 2022-05-14T00:00:00 +0900 +categories: [pytorch, study] +tags: [relu, activation, optimizer, gradient-vanishing] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab9-1: ReLU ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Problem of Sigmoid + +์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์˜ ๋ฌธ์ œ๋Š” [backpropagation](/posts/backpropagation/)๊ณผ์ •์—์„œ ๋ฐœ์ƒํ•œ๋‹ค. +backpropagation์„ ์ˆ˜ํ–‰ํ•  ๋•Œ activation์˜ ๋ฏธ๋ถ„๊ฐ’ ๊ณฑํ•ด๊ฐ€๋ฉด์„œ ์‚ฌ์šฉํ•˜๊ฒŒ ๋˜๋Š”๋ฐ ์ด๋•Œ ๊ธฐ์šธ๊ธฐ๊ฐ€ ์†Œ์‹ค๋˜๋Š” **gradient vanishing**๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. ๋‹ค์Œ ๊ทธ๋ฆผ์€ ์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์™€ ๊ทธ ๋ฏธ๋ถ„ ํ•จ์ˆ˜์˜ ๊ทธ๋ž˜ํ”„์ด๋‹ค. + +![์‹œ๊ทธ๋ชจ์ด๋“œ ํ•จ์ˆ˜์˜ ๋ฏธ๋ถ„](/posting_imgs/lab9-1-1.png) + +$x$ ๊ฐ’์ด 0์—์„œ ๋ฉ€์–ด์งˆ์ˆ˜๋ก ๋ฏธ๋ถ„๊ฐ’์ด 0์— ์ˆ˜๋ ดํ•˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ํ•œ๋‘๋ฒˆ์˜ ๊ณฑ์œผ๋กœ 0์ด ๋˜์ง€๋Š” ์•Š์„์ง€๋ผ๋„ ์—ฌ๋Ÿฌ๋ฒˆ ๋ฐ˜๋ณตํ•˜๋ฉด์„œ ์ˆ˜ํ–‰ํ•˜๊ฒŒ ๋˜๋ฉด ๊ณฑ์ด ์—ฌ๋Ÿฌ๋ฒˆ ์ค‘์ฒฉ๋˜์–ด 0์œผ๋กœ ์ˆ˜๋ ดํ•˜๊ฒŒ ๋  ๊ฒƒ์ด๋‹ค. + +๊ทธ๋ ‡๊ฒŒ ๋˜๋ฉด ์ •์ƒ์ ์ธ ํ•™์Šต์ด ๋ถˆ๊ฐ€๋Šฅํ•˜๋‹ค. ์ด๋Ÿฐ ์‹œ๊ทธ๋ชจ์ด๋“œ์˜ ๋‹จ์ ์„ ํ•ด๊ฒฐํ•˜๊ธฐ ์œ„ํ•ด ๋‚˜์˜จ ๊ฒƒ์ด **ReLU**์ด๋‹ค. + +*** + +## ReLU + +ReLU์˜ ์‹๊ณผ ๋ชจ์Šต์€ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๊ฐ„๋‹จํžˆ ์ฃผ์–ด์ง„๋‹ค. 
+ +\\[f(x)=max(0,x)\\]] + +![ReLU activation](/posting_imgs/lab9-1-2.png) + +์ด๋ ‡๊ฒŒ activation์„ ๊ตฌ์„ฑํ•  ๊ฒฝ์šฐ 0 ์ด์ƒ์˜ ๊ฐ’๋“ค์— ๋Œ€ํ•ด์„œ๋Š” ๋ฏธ๋ถ„๊ฐ’์ด 1์ด ๋‚˜์™€ ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค์„ ๋ง‰์„ ์ˆ˜ ์žˆ๋‹ค. + +PyTorch๋Š” ์ด๋Ÿฐ activation๋“ค์„ `torch.nn.ReLU` ๋“ฑ์œผ๋กœ ๋ถˆ๋Ÿฌ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ ๋งŽ์ด ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ๋“ค์€ ์•„๋ž˜์™€ ๊ฐ™๋‹ค. + +```python +torch.nn.ReLU +torch.nn.LeakyReLU +torch.nn.Sigmoid +torch.nn.Tanh +``` + +*** + +## Optimizer + +์ด๋ฒˆ ๊ฐ•์˜์—์„œ๋Š” ์—ฌ๋Ÿฌ optimizer์— ๋Œ€ํ•ด์„œ๋„ ์†Œ๊ฐœ๋ฅผ ํ–ˆ๋Š”๋ฐ, `torch.optim`์œผ๋กœ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋Š” optimizer๋กœ๋Š” ์šฐ๋ฆฌ๊ฐ€ ๊ณ„์† ์‚ฌ์šฉํ•œ SGD ๋ง๊ณ ๋„ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๋งŽ์ด ์กด์žฌํ•œ๋‹ค. + +```python +torch.optim.SGD +torch.optim.Adadelta +torch.optim.Adagrad +torch.optim.Adam +torch.optim.SparseAdam +torch.optim.Adamax +torch.optim.ASGD +torch.optim.LBFGS +torch.optim.RMSprop +torch.optim.Rprop +``` + +optimizer์— ๋Œ€ํ•ด์„œ๋Š” ์ถ”ํ›„์— ํ•œ๋ฒˆ ์ž์„ธํžˆ ๊ณต๋ถ€ํ•ด ๋ณด๊ณ  ๋”ฐ๋กœ ํฌ์ŠคํŒ… ํ•˜๋ ค๊ณ  ํ•œ๋‹ค. + +*** + +## Train + +[Lab7-2](/posts/dlZeroToAll-PyTorch-7-2/)์—์„œ ํ–ˆ๋˜ mnist ํ•™์Šต์„ adam optimizer๋กœ ํ•œ ๋ฒˆ ํ•™์Šตํ•ด๋ณด๊ณ , ๋‹ค์ค‘ ๋ ˆ์ด์–ด์— ReLU๋ฅผ ์ ์šฉํ•ด์„œ ๋˜ ํ•œ๋ฒˆ ํ•™์Šตํ•ด ๋ณด๊ฒ ๋‹ค. + +### Adam + +๋ฐ์ดํ„ฐ๋‚˜ ๋ชจ๋ธ์„ ์ •์˜ํ•˜๋Š” ๊ฒƒ์€ ๋ชจ๋‘ ์ด์ „๊ณผ ๋™์ผํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๋‹ฌ๋ผ์ง„ ๋ถ€๋ถ„์— ์ดˆ์ ์„ ๋งž์ถฐ ์ฝ”๋“œ๋ฅผ ํ•œ ๋ฒˆ ๋ณด๋„๋ก ํ•˜์ž. + +```python +criterion = torch.nn.CrossEntropyLoss().to(device) +optimizer = torch.optim.Adam(linear.parameters(), lr=learning_rate) # Adam optimizer + +total_batch = len(data_loader) +for epoch in range(training_epochs): + avg_cost = 0 + + for X, Y in data_loader: + # reshape input image into [batch_size by 784] + # label is not one-hot encoded + X = X.view(-1, 28 * 28).to(device) + Y = Y.to(device) + + optimizer.zero_grad() + hypothesis = linear(X) + cost = criterion(hypothesis, Y) + cost.backward() + optimizer.step() + + avg_cost += cost / total_batch + + print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost)) + +print('Learning finished') + +'''output +Epoch: 0001 cost = 4.848181248 +Epoch: 0002 cost = 1.464641452 +Epoch: 0003 cost = 0.977406502 +Epoch: 0004 cost = 0.790303528 +Epoch: 0005 cost = 0.686833322 +Epoch: 0006 cost = 0.618483305 +Epoch: 0007 cost = 0.568978667 +Epoch: 0008 cost = 0.531290889 +Epoch: 0009 cost = 0.501056492 +Epoch: 0010 cost = 0.476258427 +Epoch: 0011 cost = 0.455025405 +Epoch: 0012 cost = 0.437031567 +Epoch: 0013 cost = 0.421489984 +Epoch: 0014 cost = 0.408599794 +Epoch: 0015 cost = 0.396514893 +Learning finished +''' +``` + +๋‹ฌ๋ผ์ง„ ์ ์€ `optimizer`๋ฅผ ์ •์˜ํ•  ๋•Œ `torch.optim.Adam`์„ ์‚ฌ์šฉํ•˜์˜€๋‹ค๋Š” ๊ฒƒ ๋ฟ์ด๋‹ค. + +### MLP with ReLU + +```python +# nn layers +linear1 = torch.nn.Linear(784, 256, bias=True) +linear2 = torch.nn.Linear(256, 256, bias=True) +linear3 = torch.nn.Linear(256, 10, bias=True) +relu = torch.nn.ReLU() + +# Initialization +torch.nn.init.normal_(linear1.weight) +torch.nn.init.normal_(linear2.weight) +torch.nn.init.normal_(linear3.weight) + +# model +model = torch.nn.Sequential(linear1, relu, linear2, relu, linear3).to(device) +``` + +์—ฌ๊ธฐ์„œ ๋‹ฌ๋ผ์ง„ ๊ฒƒ์€ ๋‹ค์ค‘ ๋ ˆ์ด์–ด๋ฅผ ์Œ“๊ณ , ํ™œ์„ฑํ™” ํ•จ์ˆ˜๋กœ ReLU๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์„ ๊ตฌ์„ฑํ–ˆ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +ํ•™์Šต ๊ฒฐ๊ณผ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. 
+ +```python +total_batch = len(data_loader) +for epoch in range(training_epochs): + avg_cost = 0 + + for X, Y in data_loader: + # reshape input image into [batch_size by 784] + # label is not one-hot encoded + X = X.view(-1, 28 * 28).to(device) + Y = Y.to(device) + + optimizer.zero_grad() + hypothesis = model(X) + cost = criterion(hypothesis, Y) + cost.backward() + optimizer.step() + + avg_cost += cost / total_batch + + print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost)) + +print('Learning finished') + +'''output +Epoch: 0001 cost = 129.325607300 +Epoch: 0002 cost = 36.169139862 +Epoch: 0003 cost = 23.025590897 +Epoch: 0004 cost = 16.021036148 +Epoch: 0005 cost = 11.609578133 +Epoch: 0006 cost = 8.560424805 +Epoch: 0007 cost = 6.369730949 +Epoch: 0008 cost = 4.782918930 +Epoch: 0009 cost = 3.604729652 +Epoch: 0010 cost = 2.682321310 +Epoch: 0011 cost = 2.086567640 +Epoch: 0012 cost = 1.640438557 +Epoch: 0013 cost = 1.297079921 +Epoch: 0014 cost = 1.083126664 +Epoch: 0015 cost = 0.751341939 +Learning finished +''' +``` \ No newline at end of file diff --git a/_posts/2022-05-14-dlZeroToAll-PyTorch-9-2.markdown b/_posts/2022-05-14-dlZeroToAll-PyTorch-9-2.markdown new file mode 100644 index 00000000000..13363e3f2be --- /dev/null +++ b/_posts/2022-05-14-dlZeroToAll-PyTorch-9-2.markdown @@ -0,0 +1,139 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab9-2:Weight Initialization" +author: Kwon +date: 2022-05-14T01:00:00 +0900 +categories: [pytorch, study] +tags: [weight-init, rbm, dbn, xavier, he-init] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab9-2:Weight Initialization ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Weight Initialization + +์ดˆ๊ธฐ weight์˜ ์„ค์ •์€ ํฌ๊ฒŒ ์ค‘์š”ํ•˜์ง€ ์•Š์•„ ๋ณด์ด์ง€๋งŒ ์‹ค์ œ๋กœ๋Š” ํฐ ์˜ํ–ฅ์„ ๋ฏธ์นœ๋‹ค. + +![์ดˆ๊ธฐ ์„ค์ •์— ๋”ฐ๋ฅธ error์˜ ์ฐจ์ด](/posting_imgs/lab9-2-1.png) + +์œ„ ๊ทธ๋ž˜ํ”„์—์„œ๋„ ๋ณผ ์ˆ˜ ์žˆ๋“ฏ์ด ์ ์ ˆํ•œ ์ดˆ๊ธฐํ™” ๊ธฐ๋ฒ•์œผ๋กœ ์ดˆ๊ธฐํ™”๋ฅผ ํ•ด์ค€ ๊ฒฝ์šฐ(N์ด ๋ถ™์–ด์žˆ๋Š” ๊ณก์„ ) ์‹ค์ œ๋กœ ์˜ค์ฐจ๊ฐ€ ์ค„์–ด๋“  ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +*** + +## DBN(Deep Belief Network) + +### RBM(Restricted Boltzmann Machine) + +DBN์„ ์•Œ๊ธฐ ์ „์— ๋จผ์ € RBM์„ ์•Œ๊ณ  ๊ฐ€์•ผ ํ•œ๋‹ค. + +![](/posting_imgs/lab9-2-2.png" description="RBM์˜ ๊ตฌ์กฐ" %} + +RBM์€ ๋ ˆ์ด์–ด ์•ˆ์—์„œ๋Š” ์—ฐ๊ฒฐ์ด ์—†๋Š”(restricted) ๋ ˆ์ด์–ด๋“ค์˜ ์ „์—ฐ๊ฒฐ ๊ตฌ์กฐ์ด๋‹ค. ์ผ์ข…์˜ encode(์œ„์ชฝ ๋ฐฉํ–ฅ์œผ๋กœ ์ง„ํ–‰), decode(์•„๋ž˜์ชฝ ๋ฐฉํ–ฅ์œผ๋กœ ์ง„ํ–‰)๋ฅผ ์ˆ˜ํ–‰ํ•œ๋‹ค๊ณ  ๋ณผ ์ˆ˜๋„ ์žˆ๋‹ค. + +### DBN + +DBN์€ RBM์„ ํ†ตํ•ด ์ ์ ˆํ•œ weight๋ฅผ ์ฐพ์•„ ์ดˆ๊ธฐํ™”ํ•œ๋‹ค. + +![DBN์˜ ๊ตฌ์กฐ](/posting_imgs/lab9-2-3.png) + +์œ„์™€ ๊ฐ™์ด ๊ฐ ๋ ˆ์ด์–ด๋งˆ๋‹ค RBM์„ ๊ตฌ์„ฑํ•˜์—ฌ ๋‹ค์Œ ๋ ˆ์ด์–ด๋กœ ํ•œ ๋ฒˆ ๊ฐ”๋‹ค๊ฐ€ ๋‹ค์‹œ ๋Œ์•„์™€ ๋ด์„œ ์ž˜ ๋ณต์›์ด ๋˜๋Š” weight๋ฅผ ์ฐพ์•„ ๊ทธ๊ฒƒ์œผ๋กœ ์ดˆ๊ธฐํ™”ํ•˜์—ฌ ํ•™์Šต์— ์‚ฌ์šฉํ•œ๋‹ค. +์ด์ „์˜ ํฌ์ŠคํŠธ์—์„œ ์•Œ์•„๋ดค๋˜ [Stacking Autoencoder](/posts/autoencoders-3/)๋„ ์ด DBN์˜ ํ•œ ์ข…๋ฅ˜์ด๋‹ค. + +ํ•˜์ง€๋งŒ ์š”์ฆ˜์€ ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š๋Š” ๋ฐฉ๋ฒ•์ด๋‹ค. + +*** + +## Xavier / He Initialization + +์ด ๋‘๊ฐ€์ง€ ๋ฐฉ๋ฒ•์ด ํ˜„์žฌ ๋งŽ์ด ์‚ฌ์šฉ๋˜๋Š” ๋ฐฉ๋ฒ•์ธ๋ฐ, ์ด ๋ฐฉ์‹๋“ค์€ DBN๊ณผ ๊ฐ™์ด ๋ณต์žกํ•œ ํ•™์Šต๋“ค์ด ํ•„์š”ํ•˜์ง€ ์•Š๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +๋‹จ์ˆœํžˆ ๋ถ„ํฌ์— ๋”ฐ๋ผ ์•„๋ž˜ ๊ณต์‹์„ ์ ์šฉํ•˜์—ฌ in/out์˜ node ์ˆ˜๋ฅผ ํ†ตํ•ด ๊ณ„์‚ฐํ•œ ๊ฐ’์„ ์ด์šฉํ•˜์—ฌ ์ดˆ๊ธฐํ™”ํ•œ๋‹ค. 
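Written out, the standard Xavier and He formulas (my transcription of the usual statements; the figure below shows the same idea, with $n_{in}$ and $n_{out}$ the fan-in/fan-out node counts) are:

\\[ \text{Xavier:}\quad Var(W) = \frac{2}{n_{in}+n_{out}} \ \text{(normal)}, \qquad W \sim U\left(-\sqrt{\frac{6}{n_{in}+n_{out}}},\ \sqrt{\frac{6}{n_{in}+n_{out}}}\right) \ \text{(uniform)} \\]

\\[ \text{He:}\quad Var(W) = \frac{2}{n_{in}} \ \text{(normal)}, \qquad W \sim U\left(-\sqrt{\frac{6}{n_{in}}},\ \sqrt{\frac{6}{n_{in}}}\right) \ \text{(uniform)} \\]

The Xavier uniform bound $\sqrt{6/(n_{in}+n_{out})}$ is exactly the value `a` computed in the `xavier_uniform_` source below (with `gain=1`).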
+ +![](/posting_imgs/lab9-2-4.png) + +```python +def xavier_uniform_(tensor, gain=1): + fan_in, fan_out = _calculate_fan_in_and_fan_out(tensor) + std = gain * math.sqrt(2.0 / (fan_in + fan_out)) + a = math.sqrt(3.0) * std # Calculate uniform bounds from standard deviation + with torch.no_grad(): + return tensor.uniform_(-a, a) + ``` + + ์‹ค์ œ xavier์˜ ๊ตฌํ˜„์„ ๋ณด๋ฉด ์œ„์™€ ๊ฐ™์ด ๊ณต์‹ ๊ทธ๋Œ€๋กœ ๊ณ„์‚ฐํ•˜์—ฌ returnํ•ด ์ฃผ๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +*** + +## Train with Xavier + +Xavier๋ฅผ ํ†ตํ•ด ์•ž์„œ ํ•™์Šตํ•ด๋ดค๋˜ MLP๋ฅผ ํ•™์Šตํ•˜๋ ค๋ฉด ๋‹ค์Œ ๋ถ€๋ถ„๋งŒ ๋ณ€๊ฒฝํ•ด ์ฃผ๋ฉด ๋œ๋‹ค. + +```python +# nn layers +linear1 = torch.nn.Linear(784, 256, bias=True) +linear2 = torch.nn.Linear(256, 256, bias=True) +linear3 = torch.nn.Linear(256, 10, bias=True) +relu = torch.nn.ReLU() + +# xavier initialization +torch.nn.init.xavier_uniform_(linear1.weight) # not torch.nn.init.normal_() +torch.nn.init.xavier_uniform_(linear2.weight) +torch.nn.init.xavier_uniform_(linear3.weight) + +'''output +Parameter containing: +tensor([[-0.0215, -0.0894, 0.0598, ..., 0.0200, 0.0203, 0.1212], + [ 0.0078, 0.1378, 0.0920, ..., 0.0975, 0.1458, -0.0302], + [ 0.1270, -0.1296, 0.1049, ..., 0.0124, 0.1173, -0.0901], + ..., + [ 0.0661, -0.1025, 0.1437, ..., 0.0784, 0.0977, -0.0396], + [ 0.0430, -0.1274, -0.0134, ..., -0.0582, 0.1201, 0.1479], + [-0.1433, 0.0200, -0.0568, ..., 0.0787, 0.0428, -0.0036]], + requires_grad=True) +''' +``` + +`torch.nn.init.normal_()`๋Œ€์‹  `torch.nn.init.xavier_uniform_()`์„ ์‚ฌ์šฉํ•˜์—ฌ ์ดˆ๊ธฐํ™”๋ฅผ ์ง„ํ–‰ํ•œ๋‹ค. + +### Deep + +๋” ๊นŠ์€ ๋„คํŠธ์›Œํฌ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ๋ ˆ์ด์–ด๋งˆ๋‹ค `torch.nn.init.xavier_uniform_()`์„ ์‚ฌ์šฉํ•ด ์ฃผ๋ฉด ๋œ๋‹ค. + +```python +# nn layers +linear1 = torch.nn.Linear(784, 512, bias=True) +linear2 = torch.nn.Linear(512, 512, bias=True) +linear3 = torch.nn.Linear(512, 512, bias=True) +linear4 = torch.nn.Linear(512, 512, bias=True) +linear5 = torch.nn.Linear(512, 10, bias=True) +relu = torch.nn.ReLU() + +# xavier initialization +torch.nn.init.xavier_uniform_(linear1.weight) +torch.nn.init.xavier_uniform_(linear2.weight) +torch.nn.init.xavier_uniform_(linear3.weight) +torch.nn.init.xavier_uniform_(linear4.weight) +torch.nn.init.xavier_uniform_(linear5.weight) + +'''output +Parameter containing: +tensor([[-0.0565, 0.0423, -0.0155, ..., 0.1012, 0.0459, -0.0191], + [ 0.0772, 0.0452, -0.0638, ..., 0.0476, -0.0638, 0.0528], + [ 0.0311, -0.1023, -0.0701, ..., 0.0412, -0.1004, 0.0738], + ..., + [ 0.0334, 0.0187, -0.1021, ..., 0.0280, -0.0583, -0.1018], + [-0.0506, -0.0939, -0.0467, ..., -0.0554, -0.0325, 0.0640], + [-0.0183, -0.0123, 0.1025, ..., -0.0214, 0.0220, -0.0741]], + requires_grad=True) +''' +``` + +*** + +#### Image Source + +* RBM: [https://en.wikipedia.org/wiki/File:Restricted-boltzmann-machine.svg](https://en.wikipedia.org/wiki/File:Restricted-boltzmann-machine.svg) + +* DBN: [http://www.dmi.usherb.ca/~larocheh/publications/deep-nets-icml-07.pdf](http://www.dmi.usherb.ca/~larocheh/publications/deep-nets-icml-07.pdf) \ No newline at end of file diff --git a/_posts/2022-05-15-dlZeroToAll-PyTorch-9-3.markdown b/_posts/2022-05-15-dlZeroToAll-PyTorch-9-3.markdown new file mode 100644 index 00000000000..9632df16270 --- /dev/null +++ b/_posts/2022-05-15-dlZeroToAll-PyTorch-9-3.markdown @@ -0,0 +1,156 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab9-3:Dropout" +author: Kwon +date: 2022-05-15T00:00:00 +0900 +categories: [pytorch, study] +tags: [dropout, overfitting] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ 
๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab9-3: Dropout ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Dropout + +[lab7-1](/posts/dlZeroToAll-PyTorch-7-1/)์—์„œ ์•Œ์•„๋ณธ ๊ฒƒ์ฒ˜๋Ÿผ ํ•™์Šต์„ ํ•˜๋‹ค๋ณด๋ฉด train set์— ๋„ˆ๋ฌด ๊ณผ์ ํ•ฉ(overfitting)๋˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. + +์ด๋•Œ ์–ธ๊ธ‰ํ•œ ๊ณผ์ ํ•ฉ์„ ๋œ์–ด์ฃผ๊ธฐ ์œ„ํ•œ ๋ฐฉ๋ฒ•์—๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ฒƒ๋“ค์ด ์žˆ์—ˆ๋‹ค. + +* Early Stoping: valid set์˜ loss๊ฐ€ ์ค„์–ด๋“ค์ง€ ์•Š์„ ๋•Œ ํ•™์Šต์„ ์ค‘์ง€ํ•œ๋‹ค. +* Reducing Network Size +* Weight Decay: weight๊ฐ€ ๋„ˆ๋ฌด ์ปค์ง€์ง€ ์•Š๋„๋ก wight๊ฐ€ ์ปค์งˆ์ˆ˜๋ก ํ•จ๊ป˜ ์ปค์ง€๋Š” penalty๋ฅผ ๋ถ€์—ฌํ•œ๋‹ค. +* Dropout: node์˜ ์ผ๋ถ€๋ฅผ ๊บผ์„œ ํ•™์Šตํ•˜๋Š” node๋ฅผ ์ค„์ธ๋‹ค. +* Batch Normalization: ํ•™์Šต ์ค‘์— ๋ฐฐ์น˜ ๋‹จ์œ„๋กœ ์ •๊ทœํ™” ํ•˜๋Š” ๊ฒƒ + +์˜ค๋Š˜์€ ์ด ์ค‘์—์„œ dropout์— ๋Œ€ํ•ด ์•Œ์•„๋ณด๋ ค ํ•œ๋‹ค. + +dropout์€ ์ผ์ • ํ™•๋ฅ ์— ๋”ฐ๋ผ ๋ ˆ์ด์–ด์˜ node๋ฅผ ๋„๋ฉด์„œ ํ•™์Šต์„ ์ง„ํ–‰ํ•˜๋Š” ๊ฒƒ์„ ๋งํ•œ๋‹ค. +์ฆ‰, ๋ ˆ์ด์–ด์˜ ์ผ๋ถ€ node๋ฅผ ํ•™์Šต์— ์‚ฌ์šฉํ•˜์ง€ ์•Š๋Š” ๊ฒƒ์ด๋‹ค. + +![Dropout์ด ์ ์šฉ๋œ ๋„คํŠธ์›Œํฌ(์šฐ)](/posting_imgs/lab9-3-1.png) + +์œ„ ๊ทธ๋ฆผ์ฒ˜๋Ÿผ ๊ฐ ๋…ธ๋“œ๋“ค์€ ์ผ์ •ํ•œ ํ™•๋ฅ ๋กœ ๋น„ํ™œ์„ฑํ™” ํ•œ๋‹ค. ์ด๋Ÿฐ ์‹์œผ๋กœ ํ•™์Šต์„ ํ•˜๊ฒŒ ๋˜๋ฉด ๊ณผํ•˜๊ฒŒ ํ•™์Šต๋˜๋Š” ๊ฒƒ์„ ๋ง‰์•„ ๊ณผ์ ํ•ฉ ๋ชจ๋ธ์ด ๋งŒ๋“ค์–ด์ง€๋Š” ๊ฒƒ์„ ๋ง‰์„ ์ˆ˜ ์žˆ์„ ๋ฟ๋งŒ ์•„๋‹ˆ๋ผ. +๊ฐ ์‹œํ–‰๋งˆ๋‹ค ํ™•๋ฅ ์ ์œผ๋กœ ๊บผ์ง€๊ณ  ์ผœ์ง€๋Š” node๊ฐ€ ๋‹ฌ๋ผ์ง€๊ธฐ ๋•Œ๋ฌธ์— ๋‹ค์–‘ํ•œ ๋„คํŠธ์›Œํฌ๋กœ ํ•™์Šตํ•˜์—ฌ ์•™์ƒ๋ธ”ํ•œ ํšจ๊ณผ๋„ ์–ป์„ ์ˆ˜ ์žˆ์–ด ์„ฑ๋Šฅ์˜ ํ–ฅ์ƒ์œผ๋กœ๋„ ์ด๋ฃจ์–ด์งˆ ์ˆ˜ ์žˆ๋‹ค๊ณ  ํ•œ๋‹ค. + +*** + +## Train with MNIST + +ํ•™์Šตํ•˜๋Š” ์ฝ”๋“œ๋Š” ๋ชจ๋ธ์˜ ๊ตฌ์„ฑํ•  ๋•Œ dropout์ด ์ถ”๊ฐ€๋œ ๊ฒƒ ๋ง๊ณ ๋Š” ํฐ ๋ณ€ํ™”๊ฐ€ ์—†์ง€๋งŒ ์ฃผ์˜ํ•ด์•ผ ํ•  ์ ์ด ์žˆ๋‹ค. ๋จผ์ € ์ฝ”๋“œ๋ฅผ ๋ณด์ž + +```python +# nn layers +linear1 = torch.nn.Linear(784, 512, bias=True) +linear2 = torch.nn.Linear(512, 512, bias=True) +linear3 = torch.nn.Linear(512, 512, bias=True) +linear4 = torch.nn.Linear(512, 512, bias=True) +linear5 = torch.nn.Linear(512, 10, bias=True) +relu = torch.nn.ReLU() +dropout = torch.nn.Dropout(p=drop_prob) + +# xavier initialization +torch.nn.init.xavier_uniform_(linear1.weight) +torch.nn.init.xavier_uniform_(linear2.weight) +torch.nn.init.xavier_uniform_(linear3.weight) +torch.nn.init.xavier_uniform_(linear4.weight) +torch.nn.init.xavier_uniform_(linear5.weight) + +# model +model = torch.nn.Sequential(linear1, relu, dropout, + linear2, relu, dropout, + linear3, relu, dropout, + linear4, relu, dropout, + linear5).to(device) +``` + +์—ฌ๊ธฐ๊นŒ์ง€๋Š” `Sequential`ํ†ตํ•ด ๋ชจ๋ธ์„ ๊ตฌ์„ฑํ•  ๋•Œ dropout์„ ์ถ”๊ฐ€ํ•ด ์ค€ ๊ฒƒ ๋ง๊ณ ๋Š” ๋‹ค๋ฅธ ๊ฒƒ์ด ์—†์ง€๋งŒ ์•„๋ž˜๋ฅผ ๋ณด๋ฉด ํ•™์Šตํ•˜๊ธฐ ์ „์— `model.train()`์ด๋ผ๋Š” ์ƒˆ๋กœ์šด ์ฝ”๋“œ๊ฐ€ ์ถ”๊ฐ€๋œ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. 
+ +```python +total_batch = len(data_loader) +model.train() # set the model to train mode (dropout=True) +for epoch in range(training_epochs): + avg_cost = 0 + + for X, Y in data_loader: + # reshape input image into [batch_size by 784] + # label is not one-hot encoded + X = X.view(-1, 28 * 28).to(device) + Y = Y.to(device) + + optimizer.zero_grad() + hypothesis = model(X) + cost = criterion(hypothesis, Y) + cost.backward() + optimizer.step() + + avg_cost += cost / total_batch + + print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost)) + +print('Learning finished') + +'''output +Epoch: 0001 cost = 0.308392197 +Epoch: 0002 cost = 0.142623395 +Epoch: 0003 cost = 0.113427199 +Epoch: 0004 cost = 0.093490042 +Epoch: 0005 cost = 0.083772294 +Epoch: 0006 cost = 0.077040948 +Epoch: 0007 cost = 0.067025252 +Epoch: 0008 cost = 0.063156039 +Epoch: 0009 cost = 0.058766391 +Epoch: 0010 cost = 0.055902217 +Epoch: 0011 cost = 0.052059878 +Epoch: 0012 cost = 0.048243146 +Epoch: 0013 cost = 0.047231019 +Epoch: 0014 cost = 0.045120358 +Epoch: 0015 cost = 0.040942233 +Learning finished +''' +``` + +์ด๊ฑด model์„ train ์šฉ์œผ๋กœ ์‚ฌ์šฉํ•  ๊ฒƒ์ธ์ง€, eval ์šฉ์œผ๋กœ ์‚ฌ์šฉํ•  ๊ฒƒ์ธ์ง€์— ๋”ฐ๋ผ mode๋ฅผ ๋ณ€๊ฒฝ์‹œ์ผœ์ฃผ๋Š” ์ฝ”๋“œ์ด๋‹ค. (์ดํ›„์— ๋‚˜์˜ฌ batch normalization ๋“ฑ์—์„œ๋„ ์‚ฌ์šฉ) + +์ด๋ ‡๊ฒŒ ๋ชจ๋“œ๋ฅผ ๋‚˜๋ˆ  ์ฃผ๋Š” ์ด์œ ๋Š” ํ•™์Šตํ•  ๋•Œ์™€ ๋‹ฌ๋ฆฌ ๊ฒ€์ฆํ•  ๋•Œ๋Š” dropout์„ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ๋ชจ๋“  node๋ฅผ ์‚ฌ์šฉํ•ด์„œ ์˜ˆ์ธก์„ ์ง„ํ–‰ํ•˜๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ๊ฒ€์ฆํ•  ๋•Œ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•ด์•ผ ํ•œ๋‹ค. + +```python +# Test model and check accuracy +with torch.no_grad(): + model.eval() # set the model to evaluation mode (dropout=False) + + # Test the model using test sets + X_test = mnist_test.test_data.view(-1, 28 * 28).float().to(device) + Y_test = mnist_test.test_labels.to(device) + + prediction = model(X_test) + correct_prediction = torch.argmax(prediction, 1) == Y_test + accuracy = correct_prediction.float().mean() + print('Accuracy:', accuracy.item()) + + # Get one and predict + r = random.randint(0, len(mnist_test) - 1) + X_single_data = mnist_test.test_data[r:r + 1].view(-1, 28 * 28).float().to(device) + Y_single_data = mnist_test.test_labels[r:r + 1].to(device) + + print('Label: ', Y_single_data.item()) + single_prediction = model(X_single_data) + print('Prediction: ', torch.argmax(single_prediction, 1).item()) + +'''output +Accuracy: 0.9820999503135681 +Label: 8 +Prediction: 8 +''' +``` + +๋‹ค๋ฅธ ๋ถ€๋ถ„์€ ๋ชจ๋‘ ๋™์ผํ•˜๊ฒŒ ์ž‘์„ฑํ•˜๊ณ  `model.eval()`๋งŒ ์ถ”๊ฐ€ํ•˜์—ฌ model์„ ๊ฒ€์ฆํ•˜๊ธฐ ์œ„ํ•œ mode๋กœ ๋ฐ”๊ฟ”์ฃผ๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. 
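The effect of the two modes on a dropout layer can also be checked directly on a toy tensor. A standalone sketch (not part of the lecture code) is:

```python
import torch

torch.manual_seed(0)
drop = torch.nn.Dropout(p=0.5)
x = torch.ones(1, 8)

drop.train()        # training mode: roughly half the entries are zeroed,
print(drop(x))      # and the surviving ones are scaled by 1/(1-p) = 2

drop.eval()         # evaluation mode: dropout is disabled,
print(drop(x))      # so the input passes through unchanged
```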
+ +*** + +#### Image Source + +* Dropout: [http://jmlr.org/papers/volume15/srivastava14a.old/srivastava14a.pdf](http://jmlr.org/papers/volume15/srivastava14a.old/srivastava14a.pdf) \ No newline at end of file diff --git a/_posts/2022-05-15-dlZeroToAll-PyTorch-9-4.markdown b/_posts/2022-05-15-dlZeroToAll-PyTorch-9-4.markdown new file mode 100644 index 00000000000..306a1d0a7cc --- /dev/null +++ b/_posts/2022-05-15-dlZeroToAll-PyTorch-9-4.markdown @@ -0,0 +1,119 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab9-4:Batch Normalization" +author: Kwon +date: 2022-05-15T01:00:00 +0900 +categories: [pytorch, study] +tags: [batch-normalization, gradient-vanishing, gradient-exploding, internal-covariate-shift] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab9-4: Batch Normalization ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Gradient Vanishing / Gradient Exploding + +Gradient Vanishing(๊ธฐ์šธ๊ธฐ ์†Œ์‹ค)๊ณผ Gradient Exploding(๊ธฐ์šธ๊ธฐ ํญ์ฃผ)๋Š” ์ •์ƒ์ ์ธ ํ•™์Šต์„ ํ•  ์ˆ˜ ์—†๊ฒŒ ๋งŒ๋“œ๋Š” ์š”์ธ๋“ค์ด๋‹ค. + +๊ธฐ์šธ๊ธฐ ์†Œ์‹ค์€ [์•ž์„œ](/posts/dlZeroToAll-PyTorch-9-1/) ์‚ดํŽด๋ณธ ๊ฒƒ์ฒ˜๋Ÿผ ์—ญ์ „ํŒŒ๋ฅผ ์ˆ˜ํ–‰ํ•  ๋•Œ ์ž‘์€ ๋ฏธ๋ถ„๊ฐ’์„ chain rule์— ๋”ฐ๋ผ ๊ณ„์† ๊ณฑํ•˜๋‹ค๊ฐ€ ๊ฒฐ๊ตญ 0์œผ๋กœ ์†Œ๋ฉธํ•ด๋ฒ„๋ฆฌ๋Š” ๊ฒƒ์ด์—ˆ๋‹ค. + +๋ฐ˜๋Œ€๋กœ ๊ธฐ์šธ๊ธฐ ํญ์ฃผ๋Š” ๋„ˆ๋ฌด ํฐ ๋ฏธ๋ถ„๊ฐ’์„ ๊ณ„์† ๊ณฑํ•˜๋‹ค๊ฐ€ ์ˆ˜๋ ดํ•˜์ง€ ๋ชปํ•˜๊ณ  ๋ฐœ์‚ฐํ•ด๋ฒ„๋ฆฌ๋Š” ๊ฒƒ์„ ์˜๋ฏธํ•œ๋‹ค. + +์ด๋“ค์„ ๋ฐฉ์ง€ํ•˜๊ธฐ ์œ„ํ•ด ์‹œ๋„ํ•ด๋ณผ ์ˆ˜ ์žˆ๋Š” ๊ฒƒ์€ ์•„๋ž˜์™€ ๊ฐ™์€ ๋ฐฉ๋ฒ•๋“ค์ด ์žˆ๋‹ค + +* Change activation function +* Careful initialization +* Small learning rate +* Batch Normalization + +์ด ์ค‘์—์„œ ์ด๋ฒˆ ๊ฐ•์˜์—์„œ๋Š” Batch Normalization์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ๋‹ค๋ฃจ์—ˆ๋‹ค. + +*** + +Batch normalization์„ ์ œ์‹œํ•  ๋•Œ ์ฃผ์žฅํ–ˆ๋˜ ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค๊ณผ ํญ์ฃผ์— ๋Œ€ํ•œ ์ฃผ๋œ ์ด์œ ๊ฐ€ **Internal Covariate Shift**์˜€๋Š”๋ฐ ์ด์— ๋Œ€ํ•ด ํ•œ๋ฒˆ ์•Œ์•„๋ณด์ž. + +## Covariate Shift + +๋จผ์ € Covariate Shift๋Š” ๊ณต๋ณ€๋Ÿ‰ ๋ณ€ํ™”๋ผ๊ณ ๋„ ํ•˜๋Š”๋ฐ ์ž…๋ ฅ ๋ฐ์ดํ„ฐ์˜ ๋ถ„ํฌ๊ฐ€ ํ•™์Šตํ•  ๋•Œ์™€ ํ…Œ์ŠคํŠธํ•  ๋•Œ ๋‹ค๋ฅด๊ฒŒ ๋‚˜ํƒ€๋‚˜๋Š” ํ˜„์ƒ์„ ๋งํ•œ๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด ์†๊ธ€์”จ๋ฅผ ๋ถ„๋ฅ˜ํ•˜๋Š” ๋ชจ๋ธ์„ ๋งŒ๋“œ๋ ค๊ณ  ํ•  ๋•Œ ํ•™์Šต ๋ฐ์ดํ„ฐ๋กœ ํ•œ๊ธ€ ์†๊ธ€์”จ๋ฅผ ์‚ฌ์šฉํ•ด์„œ ํ•™์Šตํ•œ ํ›„ ์˜์–ด ์†๊ธ€์”จ๋ฅผ ๋ถ„๋ฅ˜ํ•˜๋ ค๊ณ  ํ•˜๋ฉด ๋‹น์—ฐํžˆ ๋ถ„๋ฅ˜๊ฐ€ ์ œ๋Œ€๋กœ ๋˜์ง€ ์•Š์„ ๊ฒƒ์ด๋‹ค. + +์ด๋Š” ๋ชจ๋ธ์ด ํ•™์Šตํ•œ ๋ถ„ํฌ์™€ ํ…Œ์ŠคํŠธ ๋ฐ์ดํ„ฐ์˜ ๋ถ„ํฌ๊ฐ€ ๋‹ค๋ฅด๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +## Internal Covariate Shift + +์ด๋Ÿฐ Covariate Shift๊ฐ€ ๋„คํŠธ์›Œํฌ ๋‚ด๋ถ€์—์„œ ๋ฐœ์ƒํ•˜๋Š” ๊ฒƒ์ด Inteanal Covariate Shift์ด๋‹ค. + +์ด์ „๊นŒ์ง€๋Š” ๋ชจ๋ธ ๋‹จ์œ„์˜ ๋ถ„ํฌ ๋ฌธ์ œ๋ผ๊ณ  ์ƒ๊ฐํ•˜๊ณ  ์ฒซ ์ž…๋ ฅ์—๋งŒ normalization์„ ํ–ˆ์ง€๋งŒ, ์‹ค์ œ๋กœ๋Š” ์•„๋ž˜์™€ ๊ฐ™์ด ์Šคํƒญ๋งˆ๋‹ค ๋ ˆ์ด์–ด ๋‹จ์œ„๋กœ Covariate Shift๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +![Internal Covariate Shift](/posting_imgs/lab9-4-1.png) + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ๊ฐ ๋ ˆ์ด์–ด ํ•™์Šต๋งˆ๋‹ค ๊ฐ minibatch๋ฅผ normalization์„ ํ•ด์•ผ ํ•˜๋ฉฐ ์ด๋ฅผ Batch normalization์ด๋ผ๊ณ  ํ•œ๋‹ค. + +*** + +## Batch Normalization + +Batch normalization์€ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์•Œ๊ณ ๋ฆฌ์ฆ˜์œผ๋กœ ์ง„ํ–‰๋œ๋‹ค. + +![Batch Normalization Algorithm](/posting_imgs/lab9-4-2.png) + +$\mu_\mathcal{B}$์™€ $\sigma^2_\mathcal{B}$๋Š” minibatch๋ฅผ ํ†ตํ•ด ๊ฒŒ์‚ฐ๋œ ํ‰๊ท ๊ณผ ๋ถ„์‚ฐ์ด๋ฉฐ sample mean/variance๋ผ๊ณ ๋„ ๋ถˆ๋ฆฐ๋‹ค. ์ด๋“ค์„ ์ด์šฉํ•ด์„œ normalize ํ•˜๊ฒŒ ๋˜๋Š”๋ฐ ์ด๋•Œ $\epsilon$์€ 0์œผ๋กœ ๋‚˜๋ˆ„๋Š” ๊ฒƒ์„ ๋ฐฉ์ง€ํ•˜๋Š” ์•„์ฃผ ์ž‘์€ ๊ฐ’์ด๋‹ค. 
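Written out, one batch-normalization step over a mini-batch $\{x_1, \dots, x_m\}$ (the standard formulation that the algorithm figure above summarizes) is:

\\[ \mu_\mathcal{B} = \frac{1}{m}\sum_{i=1}^{m} x_i, \qquad \sigma^2_\mathcal{B} = \frac{1}{m}\sum_{i=1}^{m} \left(x_i - \mu_\mathcal{B}\right)^2 \\]

\\[ \hat{x}_i = \frac{x_i - \mu_\mathcal{B}}{\sqrt{\sigma^2_\mathcal{B} + \epsilon}}, \qquad y_i = \gamma \hat{x}_i + \beta \\]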
+ +์ด๋ ‡๊ฒŒ normalize๋œ ๋ฐ์ดํ„ฐ๋ฅผ ๊ฐ€์ง€๊ณ  $\gamma$์™€ $\beta$๋ฅผ ํ•™์Šตํ•˜๋ฉด Batch normalization์„ ํ†ตํ•œ ํ•™์Šต์ด ํ•œ ๋ฒˆ ๋๋‚˜๊ฒŒ ๋œ๋‹ค. + +### Evaluation + +๊ฒ€์ฆํ•  ๋•Œ Batch normalization์„ ์‚ฌ์šฉํ•˜๋Š” ๋ชจ๋ธ์ด ๊ฐ’์˜ ์ผ๋ถ€๊ฐ€ ๋ฐ”๋€ batch๋ฅผ ๋ฐ›๋Š”๋‹ค๋ฉด ๊ทธ batch์˜ $\mu_\mathcal{B}$์™€ $\sigma^2_\mathcal{B}$ ์—ญ์‹œ ๋ฐ”๋€Œ๊ฒŒ ๋œ๋‹ค. +์ด๋ ‡๊ฒŒ ๋˜๋ฉด $\hat{x}$๋„ ๋‹ค๋ฅด๊ฒŒ ๊ณ„์‚ฐ๋˜๋Š”๋ฐ ์ด๋Ÿด ๊ฒฝ์šฐ ๊ฐ™์€ ๊ฐ’์ด๋ผ๋„ batch์˜ ์ผ๋ถ€๋ถ„์ด ๋ฐ”๋€Œ์—ˆ๋‹ค๋Š” ์‚ฌ์‹ค ํ•˜๋‚˜๋งŒ์œผ๋กœ ๋‹ค๋ฅธ ๊ฒฐ๊ณผ๊ฐ€ ๋‚˜์˜ค๊ฒŒ ๋œ๋‹ค. + +๊ฒ€์ฆ ๊ณผ์ •์—์„œ ์ด๋Ÿฌํ•œ ์ผ์ผ ๋ฐœ์ƒํ•˜๋Š” ๊ฒƒ์„ ๋ง‰๊ธฐ ์œ„ํ•ด ํ•™์Šต ์‹œ์— ๊ฒŒ์‚ฐํ–ˆ๋˜ $\mu_\mathcal{B}$์™€ $\sigma^2_\mathcal{B}$๋ฅผ ๋”ฐ๋กœ ์ €์žฅํ•ด ๋‘๊ณ , testํ•  ๋•Œ๋Š” ์ด๋ฏธ ๊ณ„์‚ฐ๋œ $\mu_\mathcal{B}$์™€ $\sigma^2_\mathcal{B}$๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +์ด๋Ÿฐ ๋ฐฉ์‹์œผ๋กœ ๊ฒฐ๊ณผ๊ฐ’์„ ๋„์ถœํ•˜๋ฉด ๊ฐ™์€ ๊ฐ’์— ๊ฐ™์€ ๊ฒฐ๊ณผ๋ฅผ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค. ์ด๋Ÿฐ ์ด์œ ๋กœ Batch normalization๋„ [dropout](/posts/dlZeroToAll-PyTorch-9-3/)๊ณผ ๊ฐ™์ด ๋”ฐ๋กœ eval mode๋กœ ๋ฐ”๊ฟ”์ค˜์•ผ ํ•œ๋‹ค. + +*** + +## Train + +Batch normalization์„ ์ ์šฉํ•˜๋ ค๋ฉด ๊ธฐ์กด ์ฝ”๋“œ์—์„œ `torch.nn.BatchNorm1d`๋ฅผ ์ถ”๊ฐ€ํ•˜์—ฌ model์„ ๊ตฌ์„ฑํ•˜๋ฉด ๋œ๋‹ค. ์œ„์น˜๋Š” activation ์ง์ „์— ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ์ด ์ผ๋ฐ˜์ ์ด๋ผ๊ณ  ํ•œ๋‹ค. + +```python +# nn layers +linear1 = torch.nn.Linear(784, 32, bias=True) +linear2 = torch.nn.Linear(32, 32, bias=True) +linear3 = torch.nn.Linear(32, 10, bias=True) +relu = torch.nn.ReLU() +bn1 = torch.nn.BatchNorm1d(32) +bn2 = torch.nn.BatchNorm1d(32) + +nn_linear1 = torch.nn.Linear(784, 32, bias=True) +nn_linear2 = torch.nn.Linear(32, 32, bias=True) +nn_linear3 = torch.nn.Linear(32, 10, bias=True) + +# model with Batch normalization +bn_model = torch.nn.Sequential(linear1, bn1, relu, + linear2, bn2, relu, + linear3).to(device) + +# model without Batch normalization +nn_model = torch.nn.Sequential(nn_linear1, relu, + nn_linear2, relu, + nn_linear3).to(device) + +# define cost/loss & optimizer +criterion = torch.nn.CrossEntropyLoss().to(device) # Softmax is internally computed. +bn_optimizer = torch.optim.Adam(bn_model.parameters(), lr=learning_rate) +nn_optimizer = torch.optim.Adam(nn_model.parameters(), lr=learning_rate) +``` + +model์€ ์œ„์—์„œ ๋งํ•œ ๊ฒƒ์ฒ˜๋Ÿผ ์ด์™€๊ฐ™์ด ๊ตฌํ˜„ํ•  ์ˆ˜ ์žˆ๊ณ , ํ•™์Šต์€ dropout๊ณผ ๋™์ผํ•˜๊ฒŒ `model.train()`์„ ๋จผ์ € ํ•ด ์ฃผ๊ณ  ์ง„ํ–‰ํ•˜๋ฉด ๋œ๋‹ค. + +![](/posting_imgs/lab9-4-3.png) + +์‹ค์ œ๋กœ Batch normalization์„ ์ ์šฉํ•œ ๊ฒฝ์šฐ๊ฐ€ ๋” ํ•™์Šต์ด ์ž˜ ๋œ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +*** + +#### Image Source + +* Batch Normalization: [https://arxiv.org/pdf/1502.03167.pdf](https://arxiv.org/pdf/1502.03167.pdf) \ No newline at end of file diff --git a/_posts/2022-05-22-dlZeroToAll-PyTorch-10-1.markdown b/_posts/2022-05-22-dlZeroToAll-PyTorch-10-1.markdown new file mode 100644 index 00000000000..a07702dc668 --- /dev/null +++ b/_posts/2022-05-22-dlZeroToAll-PyTorch-10-1.markdown @@ -0,0 +1,228 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab10-1: Convolution, Lab10-2: MNIST CNN" +author: Kwon +date: 2022-05-22T00:00:00 +0900 +categories: [pytorch, study] +tags: [convolution, padding, pooling, cnn] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab10-1: Convolution, Lab10-2: MNIST CNN ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. 
+ +*** + +## Convolution + +๊ฐ•์˜ ์ž๋ฃŒ์—์„œ๋Š” **'์ด๋ฏธ์ง€(2์ฐจ์› ๋งคํŠธ๋ฆญ์Šค) ์œ„์—์„œ stride ๋งŒํผ filter(kernel)์„ ์ด๋™์‹œํ‚ค๋ฉด์„œ ๊ฒน์ณ์ง€๋Š” ๋ถ€๋ถ„์˜ ๊ฐ ์›์†Œ์˜ ๊ฐ’์„ ๊ณฑํ•ด์„œ ๋”ํ•œ ๊ฐ’์„ ์ถœ๋ ฅ์œผ๋กœ ํ•˜๋Š” ์—ฐ์‚ฐ'**์ด๋ผ๊ณ  ๋‚˜์™€์žˆ๋‹ค. ์ž์„ธํžˆ ์–ด๋–ค ๊ณผ์ •์˜ ์—ฐ์‚ฐ์ธ์ง€ ํ™•์ธํ•ด ๋ณด์ž. + +์•„๋ž˜์™€ ๊ฐ™์ด ์ฐจ๋ก€๋Œ€๋กœ input, filter, output ํ–‰๋ ฌ์ด ์žˆ๋‹ค๊ณ  ํ•ด๋ณด์ž. + +![](/posting_imgs/lab10-1-1.png) + +input, filter์˜ ์ง„ํ•œ ๋ถ€๋ถ„์„ ๊ฐ ์ž๋ฆฌ๋ผ๋ฆฌ ๊ณฑํ•ด์„œ ๋”ํ•ด์ฃผ๋Š” ๊ฒƒ์œผ๋กœ output์˜ ์ง„ํ•œ ๋ถ€๋ถ„์˜ ๊ฒฐ๊ณผ๋ฅผ ๋‚ธ๋‹ค. ์ด ์˜ˆ์ œ์˜ stride๋Š” 1์ด๊ธฐ ๋•Œ๋ฌธ์— ํ•œ ์นธ์”ฉ ์ปค๋„์„ ์ด๋™ํ•˜๋ฉด์„œ ์ด ๊ณผ์ •์„ ์ง„ํ–‰ํ•˜์—ฌ ์ตœ์ข…์ ์œผ๋กœ ์ƒˆ๋กœ์šด 3x3 output์„ ๋งŒ๋“ค์–ด๋‚ธ๋‹ค. + +์šฐ๋ฆฌ๊ฐ€ ์›ํ•˜๋Š” filter์™€ stride๋ฅผ ์„ค์ •ํ•˜์—ฌ ์œ„ ๊ณผ์ •์„ ํ†ตํ•ด ์ƒˆ๋กœ์šด ๋งคํŠธ๋ฆญ์Šค๋ฅผ ๋งŒ๋“œ๋Š” ๊ฒƒ์ด **convolution**์ด๋‹ค. + +### Padding + +Convolution ์—ฐ์‚ฐ์— ์“ฐ์ด๋Š” ๋ฐ์ดํ„ฐ์— **padding**์ด๋ผ๋Š” ์ฒ˜๋ฆฌ๋ฅผ ํ•  ์ˆ˜ ์žˆ๋Š”๋ฐ, ์ด๊ฒƒ์€ input๋ฅผ ์ผ์ •ํ•œ ์ˆ˜๋กœ ๊ฐ์‹ผ๋‹ค๋Š” ๋œป์œผ๋กœ 1์˜ zero-padding์„ ํ•œ๋‹ค๋Š” ๊ฒƒ์€ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์ž…๋ ฅ์œผ๋กœ ์—ฐ์‚ฐ์„ ์ง„ํ–‰ํ•˜๊ฒ ๋‹ค๋Š” ๋œป์ด๋‹ค. + +![](/posting_imgs/lab10-1-2.png){: width="40%"} + +### Output Size + +Convolution output์˜ ํฌ๊ธฐ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ฃผ์–ด์ง„๋‹ค. + +\\[ Output \, size = \frac{input \, size - filter \, size + (2*padding)}{Stride} + 1 \\] + +์˜ˆ๋ฅผ ๋“ค์–ด input size = (32, 64), kernel = 5, stride = 1, padding = 0๋กœ ์ฃผ์–ด์กŒ์„ ๋•Œ + +\\[ (\frac{(32-5)+(0\times2)}{1}+1 , \frac{(32-5)+(0\times2)}{1}+1) = (28, 60) \\] + +์œ„์ฒ˜๋Ÿผ ๊ณ„์‚ฐํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Input Type in PyTorch + +PyTorch์—์„œ `torch.nn.Conv2d`์„ ์ด์šฉํ•˜์—ฌ convolution์„ ์—ฐ์‚ฐํ•  ๋•Œ input data์˜ type์€ `torch.Tensor`, shape์€ (N x C x H x W) = (batch_size, channel, height, width)์œผ๋กœ ๋งž์ถฐ์ค˜์•ผ ํ•œ๋‹ค. + +์œ„์—์„œ size๋ฅผ ๊ฒŒ์‚ฐํ–ˆ๋˜ ์˜ˆ์ œ๋ฅผ ์‹ค์ œ๋กœ ์ฝ”๋“œ๋กœ ์‹คํ–‰ํ•˜์—ฌ ํ™•์ธํ•˜๋ ค๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ž‘์„ฑํ•˜๋ฉด ๋œ๋‹ค. + +![](/posting_imgs/lab10-1-3.png){: width="40%"} + +์‹ค์ œ๋กœ๋„ ๊ณ„์‚ฐ ๊ฒฐ๊ณผ์™€ ๊ฐ™์€ shape์ด ๋‚˜์˜ค๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Convolution and Perceptron + +convolution์„ ๋‹ค์Œ๊ณผ ๊ฐ™์ด perceptron์œผ๋กœ ๋‚˜ํƒ€๋‚ผ ์ˆ˜๋„ ์žˆ๋‹ค. + +![](/posting_imgs/lab10-1-4.png) + +filter์˜ ๊ฐ’์„ weight๋กœ ๊ฐ€์ง€๊ณ  ์žˆ๋Š” perceptron์— stride๋งŒํผ ์›€์ง์ด๋ฉด์„œ ๋งคํŠธ๋ฆญ์Šค๋ฅผ ํ†ต๊ณผ์‹œํ‚ค๋ฉด output์˜ ๊ฐ ์ž๋ฆฌ ๊ฒฐ๊ณผ๊ฐ’๋“ค์ด ๊ณ„์‚ฐ๋œ๋‹ค. + +*** + +## Pooling + +Pooling์€ ์ฃผ์–ด์ง„ kernel size๋งŒํผ์˜ ๊ตฌ์—ญ์„ ๋Œ€ํ‘œํ•˜๋Š” ๊ฐ’๋“ค์„ ์ฐพ์•„์„œ ๊ทธ ๋Œ€ํ‘œ๊ฐ’์œผ๋กœ ์ƒˆ๋กœ์šด ๋งคํŠธ๋ฆญ์Šค๋ฅผ ๊ตฌ์„ฑํ•˜๋Š” ๊ฒƒ์„ ๋งํ•œ๋‹ค. + +![](/posting_imgs/lab10-1-5.png) + +์œ„ ๊ทธ๋ฆผ์€ kernel size๊ฐ€ 2์ธ max pooling๊ณผ average pooling์„ ๋‚˜ํƒ€๋‚ธ ๊ฒƒ์ด๋‹ค. + +max pooling์€ ๊ทธ ๊ตฌ์—ญ์˜ ์ตœ๋Œ€๊ฐ’์„ ์„ ํƒํ•˜๋Š” ๊ฒƒ์ด๊ณ , average pooling์€ ํ‰๊ท ๊ฐ’์„ ์„ ํƒํ•˜์—ฌ ์ƒˆ๋กœ์šด ๋งคํŠธ๋ฆญ์Šค๋ฅผ ๋งŒ๋“œ๋Š” ๊ฒƒ์ด๋‹ค. + +*** + +## Train CNN with MNIST + +### Import and Data + +seed๋ฅผ ๊ณ ์ •ํ•˜๊ณ  mnist dataset์„ ๋ถˆ๋Ÿฌ์™€์„œ `DataLoader`๋ฅผ ์ ์šฉํ•˜์—ฌ minibatch๋กœ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋„๋ก ๋งŒ๋“ค์–ด ์ค€๋‹ค. 
+ +```python +import torch +import torchvision.datasets as dsets +import torchvision.transforms as transforms +import torch.nn.init + +device = 'cuda' if torch.cuda.is_available() else 'cpu' + +# for reproducibility +torch.manual_seed(777) +if device == 'cuda': + torch.cuda.manual_seed_all(777) + +learning_rate = 0.001 +training_epochs = 15 +batch_size = 100 + +# MNIST dataset +mnist_train = dsets.MNIST(root='MNIST_data/', + train=True, + transform=transforms.ToTensor(), + download=True) + +mnist_test = dsets.MNIST(root='MNIST_data/', + train=False, + transform=transforms.ToTensor(), + download=True) + +# dataset loader +data_loader = torch.utils.data.DataLoader(dataset=mnist_train, + batch_size=batch_size, + shuffle=True, + drop_last=True) +``` + +### Model and Loss/Optimizer + +์ด์ „๊ณผ ๋‹ค๋ฅด๊ฒŒ 3๊ฐœ์˜ ํฐ layer๋กœ ๋‚˜๋ˆ„์–ด model์„ ์ƒ์„ฑํ•œ๋‹ค. 2๊ฐœ์˜ convolution layer๋ฅผ ํ†ต๊ณผํ•˜๊ณ  ํ•˜๋‚˜์˜ fully connected layer๋ฅผ ํ†ต๊ณผ์‹œํ‚จ๋‹ค. +๋‹จ, fully connected layer๋กœ ๋“ค์–ด๊ฐ€๊ธฐ ์ „์— linear layer์— ๋“ค์–ด๊ฐˆ ์ˆ˜ ์žˆ๋„๋ก data๋ฅผ `view`๋ฅผ ์ด์šฉํ•˜์—ฌ flatํ•˜๊ฒŒ ๋งŒ๋“ ๋‹ค. + +```python +class CNN(torch.nn.Module): + + def __init__(self): + super(CNN, self).__init__() + # L1 ImgIn shape=(?, 1, 28, 28) + # Conv -> (?, 32, 28, 28) + # Pool -> (?, 32, 14, 14) + self.layer1 = torch.nn.Sequential( + torch.nn.Conv2d(1, 32, kernel_size=3, stride=1, padding=1), + torch.nn.ReLU(), + torch.nn.MaxPool2d(kernel_size=2, stride=2)) + # L2 ImgIn shape=(?, 32, 14, 14) + # Conv ->(?, 64, 14, 14) + # Pool ->(?, 64, 7, 7) + self.layer2 = torch.nn.Sequential( + torch.nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1), + torch.nn.ReLU(), + torch.nn.MaxPool2d(kernel_size=2, stride=2)) + # Final FC 7x7x64 inputs -> 10 outputs + self.fc = torch.nn.Linear(7 * 7 * 64, 10, bias=True) + torch.nn.init.xavier_uniform_(self.fc.weight) + + def forward(self, x): + out = self.layer1(x) + out = self.layer2(out) + out = out.view(out.size(0), -1) # Flatten them for FC + out = self.fc(out) + return out + +model = CNN().to(device) +``` + +loss๋Š” cross entropy๋ฅผ ์‚ฌ์šฉํ•˜๊ณ  optimizer๋Š” Adam์„ ์‚ฌ์šฉํ•œ๋‹ค. loss๋ฅผ `to(device)`๋กœ ํ•™์Šต์— ์‚ฌ์šฉํ•  device์— ๋ถ™์—ฌ์ฃผ๊ณ , optimizer๋ฅผ ์ƒ์„ฑํ•  ๋•Œ `model.parameters()`๋ฅผ ๋„ฃ์–ด์ฃผ๋Š” ๊ฒƒ์„ ์žŠ์ง€ ๋ง์ž + +```python +criterion = torch.nn.CrossEntropyLoss().to(device) +optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate) +``` + +### Train + +๊ธฐ์กด์— minibatch๋กœ ํ•™์Šตํ•˜๋˜ ์ฝ”๋“œ์™€ ํฌ๊ฒŒ ๋‹ค๋ฅผ ๊ฒƒ์ด ์—†๋‹ค. + +```python +# train my model +total_batch = len(data_loader) +print('Learning started. It takes sometime.') +for epoch in range(training_epochs): + avg_cost = 0 + + for X, Y in data_loader: + # image is already size of (28x28), no reshape + # label is not one-hot encoded + X = X.to(device) + Y = Y.to(device) + + optimizer.zero_grad() + hypothesis = model(X) + cost = criterion(hypothesis, Y) + cost.backward() + optimizer.step() + + avg_cost += cost / total_batch + + print('[Epoch: {:>4}] cost = {:>.9}'.format(epoch + 1, avg_cost)) + +print('Learning Finished!') + +'''output +Learning started. It takes sometime. 
+[Epoch: 1] cost = 0.223892078 +[Epoch: 2] cost = 0.0621332489 +[Epoch: 3] cost = 0.0448851325 +[Epoch: 4] cost = 0.0356322788 +[Epoch: 5] cost = 0.0289768185 +[Epoch: 6] cost = 0.0248806253 +[Epoch: 7] cost = 0.0209558196 +[Epoch: 8] cost = 0.0180539284 +[Epoch: 9] cost = 0.0153525099 +[Epoch: 10] cost = 0.0128902728 +[Epoch: 11] cost = 0.0104844831 +[Epoch: 12] cost = 0.0100922994 +[Epoch: 13] cost = 0.00803675782 +[Epoch: 14] cost = 0.00732926652 +[Epoch: 15] cost = 0.00600952888 +Learning Finished! +''' +``` + +convolution layer๋ฅผ ์ด์šฉํ•˜์—ฌ model์„ ๊ตฌ์„ฑํ•ด๋„ ํ•™์Šต์ด ์ž˜ ๋œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +```python +# Test model and check accuracy +with torch.no_grad(): + X_test = mnist_test.test_data.view(len(mnist_test), 1, 28, 28).float().to(device) + Y_test = mnist_test.test_labels.to(device) + + prediction = model(X_test) + correct_prediction = torch.argmax(prediction, 1) == Y_test + accuracy = correct_prediction.float().mean() + print('Accuracy:', accuracy.item()) + +'''output +Accuracy: 0.9878999590873718 +''' +``` \ No newline at end of file diff --git a/_posts/2022-05-23-dlZeroToAll-PyTorch-10-3.markdown b/_posts/2022-05-23-dlZeroToAll-PyTorch-10-3.markdown new file mode 100644 index 00000000000..3e1b960510e --- /dev/null +++ b/_posts/2022-05-23-dlZeroToAll-PyTorch-10-3.markdown @@ -0,0 +1,217 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab10-3: Visdom" +author: Kwon +date: 2022-05-23T00:00:00 +0900 +categories: [pytorch, study] +tags: [visdom, visualization] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab10-3: Visdom ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Visdom + +Visdom์€ Meta ์‚ฌ(facebook)์—์„œ ์ œ๊ณตํ•˜๋Š” PyTorch์—์„œ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ์‹œ๊ฐํ™” ๋„๊ตฌ์ด๋‹ค. ์‹ค์‹œ๊ฐ„์œผ๋กœ ๋ฐ์ดํ„ฐ๋ฅผ ์‹œ๊ฐํ™”ํ•˜๋ฉด์„œ ๋ฐ”๋€Œ๋Š” ์ ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค๋Š” ์žฅ์ ์ด ์žˆ๋‹ค. + +### Install + +ํ„ฐ๋ฏธ๋„์—์„œ pip๋ฅผ ์ด์šฉํ•˜์—ฌ ์„ค์น˜ํ•  ์ˆ˜ ์žˆ๋‹ค. ์„ค์น˜๊ฐ€ ์™„๋ฃŒ๋œ ํ›„์—๋Š” visdom server๋ฅผ ์‹คํ–‰ํ•ด ์ฃผ์–ด์•ผ ์‚ฌ์šฉ์ด ๊ฐ€๋Šฅํ•˜๋‹ค. + +```python +> pip isntall visdom +> python -m visdom.server +# You can navigate to http://localhost:PORT +``` + +์„œ๋ฒ„๋ฅผ ์‹คํ–‰ํ•œ ํ›„์— ๋‚˜์˜ค๋Š” localhost ์ฃผ์†Œ๋ฅผ ํ†ตํ•ด visdom ํ™”๋ฉด์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +#### AttributeError + +ํ•„์ž์˜ ๊ฒฝ์šฐ `AttributeError: module 'brotli' has no attribute 'error'`๋ผ๋Š” ์—๋Ÿฌ๊ฐ€ ๋ฐœ์ƒํ•ด์„œ ์•„๋ž˜ ์ฝ”๋“œ๋ฅผ ํ†ตํ•ด `brotli` module์„ ์ถ”๊ฐ€์„œ ์„ค์น˜ํ•˜์—ฌ ํ•ด๊ฒฐํ•˜์˜€๋‹ค. + +```python +conda install -c conda-forge brotlipy +``` + +### Text + +๋จผ์ € ์„ค์น˜์— ๋ฌธ์ œ๊ฐ€ ์—†๋Š”์ง€ ํ™•์ธํ•˜๋Š” ๊ฒธ ๊ฐ„๋‹จํ•œ text๋ฅผ ์ถœ๋ ฅํ•ด๋ณด์ž. `vis.text()`์‚ฌ์šฉํ•˜์—ฌ ์ถœ๋ ฅํ•  ์ˆ˜ ์žˆ๋‹ค. + +```python +vis.text("Hello, world!",env="main") +``` + +![](/posting_imgs/lab10-3-1.png) + +์œ„์™€๊ฐ™์ด ์ƒˆ๋กœ์šด ์ฐฝ์— Hello, world๊ฐ€ ์ถœ๋ ฅ๋˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Image + +์ด๋ฒˆ์—๋Š” ์ด๋ฏธ์ง€๋ฅผ ์ถœ๋ ฅํ•ด๋ณด์ž + +๋ฌด์ž‘์œ„ ํ”ฝ์…€๋กœ ์ƒ์„ฑํ•œ 200x200 ์ด๋ฏธ์ง€์™€ 3๊ฐœ์˜ 28x28 ์ด๋ฏธ์ง€๋ฅผ ๋งŒ๋“ค๊ณ  ์ถœ๋ ฅํ•ด๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. ์ด ๋•Œ๋Š” ํ•˜๋‚˜์˜ ์ด๋ฏธ์ง€๋Š” `vis.image()`๋ฅผ, ์—ฌ๋Ÿฌ ์ด๋ฏธ์ง€๋Š” `vis.images()`๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +```python +a=torch.randn(3,200,200) +vis.image(a) +vis.images(torch.Tensor(3,3,28,28)) +``` + +![](/posting_imgs/lab10-3-2.png) + +๋‹ค์Œ์€ ์กฐ๊ธˆ ๋” ์ด๋ฏธ์ง€ ๋‹ค์šด ๋ฐ์ดํ„ฐ์ธ mnist์™€ CIFAR10๋ฅผ ์ถœ๋ ฅํ•˜๋ ค ํ•œ๋‹ค. 
+ +```python +MNIST = dsets.MNIST(root="./MNIST_data",train = True,transform=torchvision.transforms.ToTensor(), download=True) +cifar10 = dsets.CIFAR10(root="./cifar10",train = True, transform=torchvision.transforms.ToTensor(),download=True) + +#CIFAR10 +data = cifar10.__getitem__(0) +print(data[0].shape) +vis.images(data[0],env="main") + +# MNIST +data = MNIST.__getitem__(0) +print(data[0].shape) +vis.images(data[0],env="main") +``` + +![](/posting_imgs/lab10-3-3.png) + +๋‘๊บผ๋น„(?)์™€ ์ˆซ์ž 5๊ฐ€ ์ž˜ ๋‚˜์˜จ๋‹ค. ๋˜ํ•œ ์ด๋Ÿฐ ์ด๋ฏธ์ง€๋“ค๋„ ๋‹น์—ฐํžˆ `vis.images()`๋ฅผ ํ†ตํ•ด ํ•œ๋ฒˆ์— ๋งŽ์€ ์ด๋ฏธ์ง€๋„ ์ถœ๋ ฅํ•  ์ˆ˜ ์žˆ๋‹ค. + +```py +data_loader = torch.utils.data.DataLoader(dataset = MNIST, + batch_size = 32, + shuffle = False) +for num, value in enumerate(data_loader): + value = value[0] + print(value.shape) + vis.images(value) + break +``` + +![](/posting_imgs/lab10-3-4.png) + +๊ตณ์ด for๋ฌธ์„ ์“ฐ์ง€ ์•Š๊ณ ๋„ ๋‹ค์Œ๊ณผ ๊ฐ™์ด iter ๊ฐ์ฒด๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ถœ๋ ฅํ•  ์ˆ˜๋„ ์žˆ๋‹ค. + +```py +img = next(iter(data_loader))[0] +vis.images(img) +``` + +์ง€๊ธˆ๊นŒ์ง€ ๋„์šด ์ฐฝ๋“ค์„ ๋ชจ๋‘ ๋„๊ณ ์‹ถ์œผ๋ฉด ๋‹ค์Œ ์ฝ”๋“œ๋ฅผ ์‹คํ–‰ํ•ด์„œ ๋Œ ์ˆ˜ ์žˆ๋‹ค. + +```py +vis.close(env="main") +``` + +์ด๋ ‡๊ฒŒ ํ•˜๋ฉด main์— ๋„์›Œ์ง„ ๊ฒƒ๋“ค์„ ๋ชจ๋‘ ๋Œ ์ˆ˜ ์žˆ๋‹ค. + +### Line Plot + +Lint Plot์€ `vis.line()`์— X, Y ๋ฐ์ดํ„ฐ๋ฅผ ๋„ฃ์–ด ์„ ํ˜• ๊ทธ๋ž˜ํ”„๋ฅผ ๊ทธ๋ฆด ์ˆ˜ ์žˆ๋‹ค. + +```py +Y_data = torch.randn(5) +plt = vis.line (Y=Y_data) + +X_data = torch.Tensor([1,2,3,4,5]) +plt = vis.line(Y=Y_data, X=X_data) +``` + +![](/posting_imgs/lab10-3-5.png) + +๊ฐ€์žฅ ๊ฐ„๋‹จํ•œ ์˜ˆ์ œ๋กœ Y ๋ฐ์ดํ„ฐ๋งŒ ์„ค์ •ํ•ด ์ค€ ๊ฒƒ์ธ๋ฐ, ์ด ๊ฒฝ์šฐ์— X์ถ•์€ ๋ฌด์กฐ๊ฑด 0๊ณผ 1 ์‚ฌ์ด๋ฅผ ๋‚˜๋ˆ  point๋ฅผ ์ƒ์„ฑํ•œ๋‹ค. +๋งŒ์•ฝ X ๊ฐ’๋“ค์„ ๋‹ค๋ฅด๊ฒŒ ๋งŒ๋“ค์–ด์ฃผ๊ณ  ์‹ถ์œผ๋ฉด ์ƒˆ๋กœ์šด tensor๋ฅผ ๋งŒ๋“ค์–ด ๋„ฃ์–ด์ฃผ๋ฉด ๋œ๋‹ค. + +๊ธฐ์กด์˜ plot์— point๋ฅผ ์ถ”๊ฐ€ํ•  ์ˆ˜๋„ ์žˆ๋‹ค. `vis.line()`์— ์ƒˆ๋กœ ๋„ฃ์„ ๋ฐ์ดํ„ฐ์™€ ๊ทธ ๋ฐ์ดํ„ฐ๋ฅผ ์ถ”๊ฐ€ํ•  plot์„ ๋„ฃ์–ด์ฃผ๊ณ , `update='append'`๋กœ ์„ค์ •ํ•œ๋‹ค. + +```py +Y_append = torch.randn(1) +X_append = torch.Tensor([6]) + +vis.line(Y=Y_append, X=X_append, win=plt, update='append') +``` +![](/posting_imgs/lab10-3-6.png) + +๋‘๊ฐœ์˜ ๋‹ค๋ฅธ ๊ทธ๋ž˜ํ”„๋ฅผ ๋น„๊ตํ•˜๊ธฐ ์œ„ํ•ด ๊ฒน์ณ ๊ทธ๋ฆฌ๊ณ  ์‹ถ์œผ๋ฉด (n, 2) shape์˜ ๋ฐ์ดํ„ฐ์™€ ๊ทธ์— ๋งž๋Š” X ๋ฐ์ดํ„ฐ๋ฅผ ๋„ฃ์–ด์ฃผ๋ฉด ๋œ๋‹ค. + +```py +num = torch.Tensor(list(range(0,10))) +num = num.view(-1,1) +num = torch.cat((num,num),dim=1) + +plt = vis.line(Y=torch.randn(10,2), X = num) +``` + +![](/posting_imgs/lab10-3-7.png) + +X๋Š” 0-9์ด 2์ค„ ์ €์žฅ๋˜์–ด์žˆ๋Š” ๋ฐ์ดํ„ฐ, Y๋Š” (10,2)์˜ ๋žœ๋ค ๋ฐ์ดํ„ฐ์ด๋‹ค. + +๊ทธ๋ž˜ํ”„์— ๋Œ€ํ•œ ๋ฒ”๋ก€๋Š” `showlegend=True`๋กœ ๋ณด์ด๊ฒŒ ํ•  ์ˆ˜ ์žˆ๊ณ , `legend = []`๋ฅผ ํ†ตํ•ด ์ง์ ‘ ์ง€์ •ํ•ด ์ค„ ์ˆ˜ ์žˆ๋‹ค. defualt๋Š” ๊ทธ๋‚ญ ์ •์ˆ˜๋กœ ๋‚˜์˜จ๋‹ค. + +```py +plt = vis.line(Y=Y_data, X=X_data, opts = dict(title='Test', showlegend=True)) +plt = vis.line(Y=Y_data, X=X_data, opts = dict(title='Test', legend = ['1๋ฒˆ'],showlegend=True)) +plt = vis.line(Y=torch.randn(10,2), X = num, opts=dict(title='Test', legend=['1๋ฒˆ','2๋ฒˆ'],showlegend=True)) +``` + +![](/posting_imgs/lab10-3-8.png) + +
+ +๋งˆ์ง€๋ง‰์œผ๋กœ, ๊ฐ€์žฅ ์žฌ๋ฐŒ๊ฒŒ ๋ดค๋˜ ๊ธฐ๋Šฅ์ธ๋ฐ ์•„๊นŒ ๋‚˜์˜จ append ๊ธฐ๋Šฅ์„ ํ†ตํ•ด ์‹คํ–‰์ด ๋ฐ˜๋ณต๋  ๋•Œ๋งˆ๋‹ค plot์„ ์ž๋™์œผ๋กœ ์—…๋ฐ์ดํŠธํ•˜๋„๋ก ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•  ์ˆ˜ ์žˆ๋‹ค. + +```py +def loss_tracker(loss_plot, loss_value, num): + '''num, loss_value, are Tensor''' + vis.line(X=num, + Y=loss_value, + win = loss_plot, + update='append' + ) + +plt = vis.line(Y=torch.Tensor(1).zero_()) + +for i in range(500): + loss = torch.randn(1) + i + loss_tracker(plt, loss, torch.Tensor([i])) +``` + +![](/posting_imgs/lab10-3-9.gif) + +์‹ค์ œ๋กœ ํ•™์Šตํ•  ๋•Œ loss๋ฅผ ๋„ฃ์–ด ํ•™์Šต์„ ๋ชจ๋‹ˆํ„ฐ๋งํ•˜๋Š”๋ฐ ์œ ์šฉํ•˜๊ฒŒ ์“ธ ์ˆ˜ ์žˆ๋‹ค. + +[์ด์ „ ํฌ์ŠคํŒ…](/posts/dlZeroToAll-PyTorch-10-3/)์—์„œ ํ•™์Šตํ•  ๋•Œ ์‚ฌ์šฉํ–ˆ๋˜ ์ฝ”๋“œ์—์„œ `loss_tracker`๋ฅผ ์ถ”๊ฐ€ํ•˜์—ฌ ์‹คํ–‰ํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๋ชจ๋‹ˆํ„ฐ๋ง์ด ๊ฐ€๋Šฅํ•˜๋‹ค. + +```py +total_batch = len(data_loader) + +for epoch in range(training_epochs): + avg_cost = 0 + + for X, Y in data_loader: + X = X.to(device) + Y = Y.to(device) + + optimizer.zero_grad() + hypothesis = model(X) + + cost = criterion(hypothesis, Y) + cost.backward() + optimizer.step() + + avg_cost += cost / total_batch + + print('[Epoch:{}] cost = {}'.format(epoch+1, avg_cost)) + # tracking + loss_tracker(loss_plt, torch.Tensor([avg_cost]), torch.Tensor([epoch])) +print('Learning Finished!') +``` + +![](/posting_imgs/lab10-3-10.gif) + +loss๊ฐ€ ์–ด๋–ป๊ฒŒ ๋ณ€ํ•˜๊ณ  ์žˆ๋Š”์ง€ ํ™•์ธํ•˜๋Š”๋ฐ ๋งค์šฐ ์ ํ•ฉํ•œ ๊ธฐ๋Šฅ์ธ๊ฒƒ ๊ฐ™๋‹ค. \ No newline at end of file diff --git a/_posts/2022-05-23-dlZeroToAll-PyTorch-10-4.markdown b/_posts/2022-05-23-dlZeroToAll-PyTorch-10-4.markdown new file mode 100644 index 00000000000..ddc35a6f2c6 --- /dev/null +++ b/_posts/2022-05-23-dlZeroToAll-PyTorch-10-4.markdown @@ -0,0 +1,258 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab10-4: ImageFolder" +author: Kwon +date: 2022-05-23T01:00:00 +0900 +categories: [pytorch, study] +tags: [imagefolder, cnn, transform] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab10-4: ImageFolder ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## ImageFolder + +`torchvision.datasets`์— ์žˆ๋Š” ImageFolder๋Š” directory์— ๋”ฐ๋ผ category๋ฅผ ์ž๋™์œผ๋กœ labeling ํ•˜์—ฌ ๋ฐ์ดํ„ฐ๋กœ ๋งŒ๋“ค์–ด ์ค€๋‹ค. ์šฐ๋ฆฌ๊ฐ€ ์ฐ์€ ์‚ฌ์ง„์„ ํ•™์Šตํ•˜๋Š”๋ฐ ์‚ฌ์šฉํ•  ๋•Œ ์•„์ฃผ ์ข‹์€ ๊ธฐ๋Šฅ์ด๋‹ค. + +์ด๋ฒˆ ๊ฐ•์˜์—์„œ๋Š” ๋ฏธ๋ฆฌ ์ฐ์–ด์„œ ์ œ๊ณตํ•ด ์ค€ ํšŒ์ƒ‰, ๋นจ๊ฐ„์ƒ‰ ์˜์ž ์‚ฌ์ง„ ๋ถ„๋ฅ˜๋ฅผ ํ•ด๋ณผ ๊ฒƒ์ด๋‹ค. + +๋จผ์ € category์— ๋งž๊ฒŒ directory๋ฅผ ์ƒ์„ฑํ•ด์ฃผ์–ด์•ผ ImageFolder๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ, directory๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ตฌ์กฐ์—ฌ์•ผ ํ•œ๋‹ค. + +![](/posting_imgs/lab10-4-1.png) + +```py +import torchvision +from torchvision import transforms + +from torch.utils.data import DataLoader + +from matplotlib.pyplot import imshow +%matplotlib inline + +trans = transforms.Compose([ + transforms.Resize((64,128)) +]) + +train_data = torchvision.datasets.ImageFolder(root='custom_data/origin_data', transform=trans) +``` + +์›๋ณธ ๋ฐ์ดํ„ฐ๊ฐ€ ์žˆ๋Š” ๊ณณ์„ root๋กœ ์žก๊ณ  `Compose`๋ฅผ ํ†ตํ•ด ์ ์šฉํ•  `transforms`๋“ค์„ ๋ฌถ์–ด ๋„ฃ์–ด์ค€๋‹ค. ์›๋ณธ ๋ฐ์ดํ„ฐ๊ฐ€ 265x512๋กœ ๋„ˆ๋ฌด ์ปค์„œ 64x128๋กœ ๋ฐ”๊พธ์–ด์ฃผ๋Š” ๊ณผ์ •์„ ๊ฑฐ์นœ๋‹ค. +์—ฌ๊ธฐ์„œ๋Š” ํ•˜๋‚˜์˜ `transforms`์„ ์‚ฌ์šฉํ•˜์ง€๋งŒ ์–ด๋Ÿฌ๊ฐœ๋ฅผ ์‚ฌ์šฉํ•ด์•ผํ•  ๋•Œ `Compose`๋กœ ๋ฌถ์–ด ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ด๋ ‡๊ฒŒ ๋ถˆ๋Ÿฌ์˜จ ๋ฐ์ดํ„ฐ๋“ค์„ ์ •๋ฆฌํ•˜์—ฌ train data๋กœ ๋งŒ๋“ค์–ด์ค€๋‹ค. 
directory ์ƒ gray๊ฐ€ ๋” ๋น ๋ฅด๋ฏ€๋กœ label 0, red๊ฐ€ label 1์ด๋‹ค. + +```py +for num, value in enumerate(train_data): + data, label = value + print(num, data, label) + + if(label == 0): + data.save('custom_data/train_data/gray/%d_%d.jpeg'%(num, label)) + else: + data.save('custom_data/train_data/red/%d_%d.jpeg'%(num, label)) +``` + +*** + +## Train + +### Imports and Data + +```py +import torch +import torch.nn as nn +import torch.nn.functional as F + +import torch.optim as optim +from torch.utils.data import DataLoader + +import torchvision +import torchvision.transforms as transforms + +device = 'cuda' if torch.cuda.is_available() else 'cpu' + +torch.manual_seed(777) +if device =='cuda': + torch.cuda.manual_seed_all(777) + +trans = transforms.Compose([ + transforms.ToTensor() +]) + +train_data = torchvision.datasets.ImageFolder(root='./custom_data/train_data', transform=trans) +data_loader = DataLoader(dataset = train_data, batch_size = 8, shuffle = True, num_workers=2) +``` + +์•ž์„œ ๋งŒ๋“ค์–ด๋†“์€ train data๋ฅผ `ImageFolder`๋กœ ๋ถˆ๋Ÿฌ์™€์„œ ์‚ฌ์šฉํ•œ๋‹ค. ๋ฌผ๋ก  ์ž๋™์œผ๋กœ label์„ ๋ถ™์—ฌ ๋ฐ์ดํ„ฐ๋ฅผ ์ƒ์„ฑํ•ด์ค€๋‹ค. + +### Model and Loss/Optimizer + +๋‘ ๋ฒˆ์˜ CNN layer๋ฅผ ๊ฑฐ์น˜๊ณ  FC layer๋ฅผ ํ•˜๋‚˜ ํ†ต๊ณผ์‹œํ‚ค๋Š” [lab10-2](/posts/dlZeroToAll-PyTorch-10-3/)์—์„œ ์‚ฌ์šฉํ•œ ๊ฒƒ๊ณผ data shape๋ง๊ณ ๋Š” ๊ฑฐ์˜ ๊ฐ™์€ ๋ชจ๋ธ์ด๋‹ค. + +```py +class CNN(nn.Module): + def __init__(self): + super(CNN, self).__init__() + self.layer1 = nn.Sequential( + nn.Conv2d(3,6,5), + nn.ReLU(), + nn.MaxPool2d(2), + ) + self.layer2 = nn.Sequential( + nn.Conv2d(6,16,5), + nn.ReLU(), + nn.MaxPool2d(2), + ) + self.layer3 = nn.Sequential( + nn.Linear(16*13*29, 120), + nn.ReLU(), + nn.Linear(120,2) + ) + + def forward(self, x): + out = self.layer1(x) + out = self.layer2(out) + out = out.view(out.shape[0], -1) + out = self.layer3(out) + return out +``` + +์ด๋ ‡๊ฒŒ ๋งŒ๋“  model์€ ๊ผญ ํ…Œ์ŠคํŠธ ํ•˜๋Š” ๊ณผ์ •์„ ๊ฑฐ์ณ์•ผ ํ•œ๋‹ค๊ณ  ํ•œ๋‹ค. ํ…Œ์ŠคํŠธ๋Š” ๋„ฃ์œผ๋ ค๋Š” ๋ฐ์ดํ„ฐ์™€ shape์ด ๊ฐ™์€ Tensor๋ฅผ ์ƒ์„ฑํ•˜์—ฌ ํ†ต๊ณผ์‹œ์ผœ๋ณด๋Š” ๊ฒƒ์„ ๋งํ•œ๋‹ค. + +```py +#testing +net = CNN().to(device) +test_input = (torch.Tensor(3,3,64,128)).to(device) +test_out = net(test_input) +``` + +optimizer์™€ loss๋„ ์—ญ์‹œ ๋™์ผํ•˜๋‹ค. + +```py +optimizer = optim.Adam(net.parameters(), lr=0.00005) +loss_func = nn.CrossEntropyLoss().to(device) +``` + +### Train model + +ํ•™์Šต ์—ญ์‹œ ์ด์ „๊ณผ ๋‹ค๋ฅด์ง€ ์•Š๊ฐœ ์ง„ํ–‰ํ•œ๋‹ค. + +```py +total_batch = len(data_loader) + +epochs = 7 +for epoch in range(epochs): + avg_cost = 0.0 + for num, data in enumerate(data_loader): + imgs, labels = data + imgs = imgs.to(device) + labels = labels.to(device) + optimizer.zero_grad() + out = net(imgs) + loss = loss_func(out, labels) + loss.backward() + optimizer.step() + + avg_cost += loss / total_batch + + print('[Epoch:{}] cost = {}'.format(epoch+1, avg_cost)) +print('Learning Finished!') + +'''output +[Epoch:1] cost = 0.6341210007667542 +[Epoch:2] cost = 0.3761218190193176 +[Epoch:3] cost = 0.1116236224770546 +[Epoch:4] cost = 0.03525366261601448 +[Epoch:5] cost = 0.016341226175427437 +[Epoch:6] cost = 0.009176642633974552 +[Epoch:7] cost = 0.005688846111297607 +Learning Finished! +''' +``` + +### Save model + +์ด๋ ‡๊ฒŒ ํ•™์Šตํ•œ ๋ชจ๋ธ์„ ๋งค๋ฒˆ ๋‹ค์‹œ ํ•™์Šตํ•˜๋Š” ๊ฒƒ์€ ๋„ˆ๋ฌด ๋น„ํšจ์œจ์ ์ด๋‹ค. ๊ทธ๋ž˜์„œ ๋ชจ๋ธ์„ ์ €์žฅํ•ด ์ค€๋‹ค. + +```py +torch.save(net.state_dict(), "./model/model.pth") +``` + +๋ถˆ๋Ÿฌ์˜ค๋Š” ๊ฒƒ์€ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ƒˆ๋กœ์šด CNN ๊ฐ์ฒด๋ฅผ ์ƒ์„ฑํ•˜์—ฌ ๋„ฃ์–ด์ฃผ๋ฉด ๋œ๋‹ค. 
+ +```py +new_net = CNN().to(device) +new_net.load_state_dict(torch.load('./model/model.pth')) +``` + +๊ธฐ์กด์˜ ๋ชจ๋ธ๊ณผ ๋™์ผํ•œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +```py +print(net.layer1[0]) +print(new_net.layer1[0]) + +print(net.layer1[0].weight[0][0][0]) +print(new_net.layer1[0].weight[0][0][0]) + +net.layer1[0].weight[0] == new_net.layer1[0].weight[0] + +'''output +Conv2d(3, 6, kernel_size=(5, 5), stride=(1, 1)) +Conv2d(3, 6, kernel_size=(5, 5), stride=(1, 1)) +tensor([-0.0914, 0.0032, -0.0170, -0.0211, 0.0933], device='cuda:0', + grad_fn=) +tensor([-0.0914, 0.0032, -0.0170, -0.0211, 0.0933], device='cuda:0', + grad_fn=) +tensor([[[True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True]], + + [[True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True]], + + [[True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True], + [True, True, True, True, True]]], device='cuda:0') +''' +``` + +### Test + +testํ•  ๋•Œ๋„ train data์™€ ๋˜‘๊ฐ™์ด ์ฒ˜๋ฆฌํ•˜์—ฌ ์‚ฌ์šฉํ•˜๋ฉด ๋œ๋‹ค. + +```py +trans=torchvision.transforms.Compose([ + transforms.Resize((64,128)), + transforms.ToTensor() +]) +test_data = torchvision.datasets.ImageFolder(root='./custom_data/test_data', transform=trans) + +test_set = DataLoader(dataset = test_data, batch_size = len(test_data)) +``` + +์ด๋ ‡๊ฒŒ ํ•˜๋ฉด ์—ญ์‹œ label์ด ๋ถ™์€ ์ฑ„๋กœ data๊ฐ€ ์ƒ์„ฑ๋˜๊ฒŒ ๋œ๋‹ค. + +test ๊ฒฐ๊ณผ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +```py +with torch.no_grad(): + for num, data in enumerate(test_set): + imgs, label = data + imgs = imgs.to(device) + label = label.to(device) + + prediction = net(imgs) + + correct_prediction = torch.argmax(prediction, 1) == label + + accuracy = correct_prediction.float().mean() + print('Accuracy:', accuracy.item()) +``` \ No newline at end of file diff --git a/_posts/2022-05-26-dlZeroToAll-PyTorch-10-5.markdown b/_posts/2022-05-26-dlZeroToAll-PyTorch-10-5.markdown new file mode 100644 index 00000000000..da2fe9851f2 --- /dev/null +++ b/_posts/2022-05-26-dlZeroToAll-PyTorch-10-5.markdown @@ -0,0 +1,456 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab10-5: VGG" +author: Kwon +date: 2022-05-26T00:00:00 +0900 +categories: [pytorch, study] +tags: [cnn, vgg] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab10-5: Advence CNN(VGG) ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## VGG-net(Visual Geometry Group-network) + +VGG-net(์ดํ•˜ VGG)์€ 14๋…„๋„ ILSVRC(Imagenet ์ด๋ฏธ์ง€ ์ธ์‹ ๋Œ€ํšŒ)์— ๋‚˜์˜จ ๋„คํŠธ์›Œํฌ๋กœ ์˜ฅ์Šคํฌ๋“œ์˜ Visual Geometry Group์—์„œ ๋งŒ๋“  ๋ชจ๋ธ์ด๋‹ค. + +๋จผ์ € ๋…ผ๋ฌธ์—์„œ ๋ฐœํ‘œํ•œ VGG๋ชจ๋ธ๋“ค์˜ ๊ตฌ์กฐ๋ฅผ ๋ณด์ž. + +![](/posting_imgs/lab10-5-1.png) + +์ด 6๊ฐœ์˜ ๊ตฌ์กฐ๋ฅผ ๋งŒ๋“ค์–ด ์„ฑ๋Šฅ์„ ๋น„๊ตํ•˜์˜€๋Š”๋ฐ E๋กœ ๊ฐˆ ์ˆ˜๋ก ๊นŠ์€ ๋ชจ๋ธ์ด๋ฉฐ ๋ชจ๋ธ์ด ๊นŠ์–ด์งˆ์ˆ˜๋ก ์ข‹์€ ์„ฑ๋Šฅ์„ ๋ณด์˜€๋‹ค๊ณ  ํ•œ๋‹ค. + +VGG๋Š” ์—ฌ๋Ÿฌ ์ธต์— ๋”ฐ๋ผ ์ด๋ฆ„์„ ๋ถ™์—ฌ์ฃผ๋Š”๋ฐ ์˜ˆ๋ฅผ ๋“ค๋ฉด E์˜ ๊ฒฝ์šฐ ์ด 19์ธต(16(conv) + 3(fc) = 19)์ด๋ฏ€๋กœ VGG19์ด๋‹ค. + +### VGG16 Architecture + +๋’ค์˜ ํ•™์Šต์—์„œ๋„ ์‚ฌ์šฉํ•  VGG16์˜ ๊ตฌ์กฐ๋ฅผ ๋Œ€ํ‘œ์ ์œผ๋กœ ํ™•์ธํ•ด๋ณด์ž. Imagenet์˜ ์ด๋ฏธ์ง€ ํฌ๊ธฐ์ธ 224x224์— rgb 3์ฑ„๋„์˜ input์„ ๋ฐ›๋Š” ๋ชจ๋ธ์˜ ๊ตฌ์กฐ์ด๋‹ค. 
+ +๊ตฌ์กฐ๋ฅผ ๋ณด๋ฉด 3x3 ์ปค๋„๋กœ ์—ฌ๋Ÿฌ๋ฒˆ์˜ conv๋ฅผ ์ง„ํ–‰ํ•˜๋Š”๋ฐ ๊ทธ ์ด์œ ๋Š” ํ›ˆ๋ จํ•ด์•ผํ•  ๊ฐ€์ค‘์น˜๋ฅผ ์ค„์ด๊ธฐ ์œ„ํ•ด์„œ์ด๋‹ค. + +224x224 ํฌ๊ธฐ์˜ ํ–‰๋ ฌ์— 3x3์ปค๋„๋กœ stride=1 ์˜ conv๋ฅผ 2๋ฒˆ ํ•˜๋ฉด output size๋Š” 220x220์ด๊ณ , + +\\[ Output \, size = \frac{input \, size - filter \, size + (2*padding)}{Stride} + 1 \\\\\\ +=((224 - 3) + 1)-3 + 1 = 220 + \\] + +5x5 ์ปค๋„๋กœ stride=1 ์˜ conv๋ฅผ ํ•˜๋ฉด output size๋Š” $224-5+1=220$๋กœ output size๊ฐ€ ๊ฐ™์€ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. ์ฐธ๊ณ ๋กœ 3๋ฒˆ์˜ 3x3 conv๋ฅผ ํ•˜๋ฉด 7x7 conv ํ•œ๋ฒˆ๊ณผ size๊ฐ€ ๊ฐ™๋‹ค. + +๊ฐ™์€ output size์ง€๋งŒ 3x3 conv๋ฅผ ์‚ฌ์šฉํ•œ ๊ฒฝ์šฐ์—๋Š” ํ•™์Šตํ•ด์•ผํ•  ๊ฐ€์ค‘์น˜๋Š” $2(3\times3)=18$์ด๊ณ , 5x5์˜ ๊ฒฝ์šฐ์—๋Š” $5\times5=25$๋กœ ํ•™์Šตํ•ด์•ผํ•  ์–‘์ด ๋” ๋งŽ๋‹ค. ๋˜ํ•œ ์—ฌ๋Ÿฌ๊ฐœ์˜ conv ์ธต์„ ์‚ฌ์šฉํ•  ๊ฒฝ์šฐ ๋น„์„ ํ˜•์„ฑ์ด ๋” ๋Š˜์–ด๋‚˜์„œ ๋ณด๋‹ค ๋ณต์žกํ•œ ๋ฐ์ดํ„ฐ๋ฅผ ์ž˜ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋‹ค๋Š” ์žฅ์ ๋„ ์žˆ๋‹ค. + +๋˜ ๋‹ค๋ฅธ VGG์˜ ํŠน์ง•์œผ๋กœ๋Š” conv๋งˆ๋‹ค padding=1์„ ํ•ด์ค˜์„œ conv ์ „ํ›„์˜ size๋ฅผ ๊ฐ™๊ฒŒ ๋งŒ๋“ค์–ด์ค€๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. ์ด๋Ÿฐ ํŠน์„ฑ๋“ค์„ ํ™•์ธํ•˜๊ณ  VGG16์˜ ๊ตฌ์กฐ๋ฅผ ์‚ดํŽด๋ณด์ž. + +![VGG16 Architecture](/posting_imgs/lab10-5-2.png) + +์•ž์„œ ์–ธ๊ธ‰ํ–ˆ๋“ฏ์ด 3x3 conv๋ฅผ padding=1๋กœ 2~3๋ฒˆ ์ ์šฉํ•˜๊ณ  max plooing(kernel size=2, stride=2)์„ ํ•˜์—ฌ ํฌ๊ธฐ๋ฅผ ์ค„์—ฌ ๋‹ค์Œ conv๋กœ ๋„˜๊ฒจ์ค€๋‹ค. (์ด๋•Œ activation์€ ReLU) + +์ด๋ ‡๊ฒŒ conv + max pooling์˜ ํฐ layer๋ฅผ 5๋ฒˆ ํ†ต๊ณผํ•˜๊ณ  ๋‚˜๋ฉด data๋ฅผ plattenํ•˜๊ฒŒ ๋งŒ๋“ค์–ด์„œ fully connected layer๋ฅผ ํ†ต๊ณผ์‹œํ‚จ๋‹ค. +์ด๋•Œ fully connected layer๋Š” ๋งˆ์ง€๋ง‰ max pooling์˜ output size์— ๋งž์ถฐ $7\times7\times512$ ์˜ input์„ ๋ฐ›์„ ์ˆ˜ ์žˆ๊ฒŒ ํ•œ๋‹ค. ๋งˆ์ง€๋ง‰ layer์—์„œ๋Š” imagenet์˜ class ๊ฐœ์ˆ˜์ธ 1000์œผ๋กœ ๋งž์ถฐ์ฃผ๊ณ  softmax๋ฅผ ์ ์šฉ์‹œํ‚จ๋‹ค. + +์ด๋ฅผ ์ฝ”๋“œ๋กœ ํ’€์–ด ํ‘œํ˜„ํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +```py +conv2d = nn.Conv2d(3, 64, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(3, 64, kernel_size=3, padding=1) +nn.ReLU(inplace=True) + +nn.MaxPool2d(kernel_size=2, stride=2) + +conv2d = nn.Conv2d(64, 128, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(128, 128, kernel_size=3, padding=1) +nn.ReLU(inplace=True) + +nn.MaxPool2d(kernel_size-2, stride=2) + +conv2d = nn.Conv2d(128, 256, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(256, 256, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(256, 256, kernel_size=3, padding=1) +nn.ReLU(inplace=True) + +nn.MaxPool2d(kernel_size=2, stride=2) + +conv2d = nn.Conv2d(256, 512, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(512, 512, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(512, 512, kernel_size=3, padding=1) +nn.ReLU(inplace=True) + +nn.MaxPool2d(kernel_size=2, stride=2) + +conv2d = nn.Conv2d(512, 512, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(512, 512, kernel_size=3, padding=1) +nn.ReLU(inplace=True) +conv2d = nn.Conv2d(512, 512, kernel_size=3, padding=1) +nn.ReLU(inplace=True) + +nn.MaxPool2d(kernel_size=2, stride=2) + +x = x.view(x.size(0), -1) # flatten + +nn.Linear(512 * 7 * 7, 4096), +nn.ReLU(True), +nn.Dropout(), +nn.Linear(4096, 4096), +nn.ReLU(True), +nn.Dropout(), +nn.Linear(4096, 1000), +``` + +*** + +## Code with VGG + +PyTorch์— ๊ตฌํ˜„๋œ VGG๋ฅผ ํ•œ๋ฒˆ ๋œฏ์–ด๋ณด์ž. ๋จผ์ € conv layer๋ฅผ ๋งŒ๋“œ๋Š” ๊ณผ์ •์ด๋‹ค. 
+ +```py +cfg = { + 'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], #8(conv) + 3(fc) =11 == vgg11 + 'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], # 10 + 3 = vgg 13 + 'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'], #13 + 3 = vgg 16 + 'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'], # 16 +3 =vgg 19 + 'custom' : [64,64,64,'M',128,128,128,'M',256,256,256,'M'] +} + +def make_layers(cfg, batch_norm=False): + layers = [] + in_channels = 3 + + for v in cfg: + if v == 'M': + layers += [nn.MaxPool2d(kernel_size=2, stride=2)] + else: + conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1) + + if batch_norm: + layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)] + else: + layers += [conv2d, nn.ReLU(inplace=True)] + in_channels = v + return nn.Sequential(*layers) +``` + +`cfg`๋กœ ์ •์˜๋œ dictionary์—์„œ ๋ชจ๋ธ์„ ๊ณจ๋ผ ๋„ฃ์–ด์ฃผ๋ฉด ๋œ๋‹ค. ์šฐ๋ฆฌ๊ฐ€ ๋งŒ๋“ค๊ณ  ์‹ถ์€ ๊ฒƒ์€ VGG16์ด๋‹ˆ๊นŒ `cfg['D']`๋ฅผ `make_layers`์— ๋„ฃ์–ด์ฃผ๋ฉด ํ•จ์ˆ˜๊ฐ€ ์ง€์ •ํ•œ ๋ชจ๋ธ ์ฝ”๋“œ(D)์˜ list์— ๋”ฐ๋ผ `nn.Sequential`๋กœ ๋ฌถ์ธ ๋ชจ๋ธ์„ ๋ฐ˜ํ™˜ํ•ด์ค€๋‹ค. + +conv layer ๋ถ€๋ถ„์„ ๋ชจ๋‘ ๋งŒ๋“ค์—ˆ์œผ๋‹ˆ ์ด์ œ fc layer๋ฅผ ์ด์–ด์„œ VGG๋ฅผ ์™„์„ฑํ•˜๋ฉด ๋œ๋‹ค. + +```py +class VGG(nn.Module): + def __init__(self, features, num_classes=1000, init_weights=True): + super(VGG, self).__init__() + + self.features = features #convolution + + self.avgpool = nn.AdaptiveAvgPool2d((7, 7)) + + self.classifier = nn.Sequential( + nn.Linear(512 * 7 * 7, 4096), + nn.ReLU(True), + nn.Dropout(), + nn.Linear(4096, 4096), + nn.ReLU(True), + nn.Dropout(), + nn.Linear(4096, num_classes), + )#FC layer + + if init_weights: + self._initialize_weights() + + def forward(self, x): + x = self.features(x) #Convolution + x = self.avgpool(x) # avgpool + x = x.view(x.size(0), -1) #flatten + x = self.classifier(x) #FC layer + return x + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) +``` + +์•ž์„œ ๋งŒ๋“ค์—ˆ๋˜ CNN์€ `features`์— ์ „๋‹ฌ๋˜๊ณ  7x7๋กœ ๋ฐ”๊ฟ”์ฃผ๊ธฐ ์œ„ํ•œ pooling layer์™€ ๊ณ ์ •์ ์œผ๋กœ ์‚ฌ์šฉ๋˜๋Š” linear layer๋ฅผ ์ •์˜ํ•ด์ค€๋‹ค. + +์ด๋•Œ ๋ชจ๋ธ๋“ค์˜ ๊ฐ€์ค‘์น˜ ์ดˆ๊ธฐํ™”๋Š” `_initialize_weights`์— ์˜ํ•ด ์ด๋ฃจ์–ด์ง€๋Š”๋ฐ conv layer๋Š” `kaiming_normal_`์„ ์‚ฌ์šฉํ•ด์„œ, batch norm layer๋Š” weight๋ฅผ 1๋กœ bias๋ฅผ 0์œผ๋กœ, linear layer๋Š” normal distribution์— bais๋Š” 0์œผ๋กœ ์ดˆ๊ธฐํ™”ํ•œ๋‹ค. + +`forward`์—์„œ๋Š” ๊ฐ layer๋ฅผ ํ†ต๊ณผ์‹œํ‚ค๋ฉด์„œ ํ•™์Šต์„ ํ•œ๋‹ค. ๋‹ค๋งŒ linear layer๋กœ ๋“ค์–ด๊ฐ€๊ธฐ ์ „์— `view`๋ฅผ ํ†ตํ•ด flatํ•˜๊ฒŒ ๋งŒ๋“ค์–ด์ฃผ๋Š” ๊ณผ์ •์ด ์ถ”๊ฐ€๋˜์–ด์•ผ ํ•œ๋‹ค. + +*** + +## Train + +VGG๋ฅผ ํ†ตํ•œ ํ•™์Šต๋„ ํ•œ๋ฒˆ ํ•ด๋ณด์ž. 
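본격적으로 학습에 들어가기 전에, 위에서 정의한 `make_layers`와 `VGG` class가 잘 맞물리는지 간단히 확인해 볼 수 있다. 아래는 imagenet 크기(224x224)의 더미 입력을 가정한 확인용 스케치이다.

```py
# cfg['D'](VGG16)로 conv layer를 만들어 VGG class에 연결해 보는 확인용 스케치
import torch

vgg16_check = VGG(make_layers(cfg['D']), num_classes=1000, init_weights=True)

x = torch.randn(1, 3, 224, 224)   # imagenet 크기의 더미 입력
print(vgg16_check(x).shape)       # torch.Size([1, 1000]) 이 나오면 구조가 제대로 연결된 것이다
```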
+ +### Import and Setting + +```py +import torch +import torch.nn as nn + +import torch.optim as optim + +import torchvision +import torchvision.transforms as transforms + +import visdom + +vis = visdom.Visdom() +vis.close(env="main") + +def loss_tracker(loss_plot, loss_value, num): + '''num, loss_value, are Tensor''' + vis.line(X=num, + Y=loss_value, + win = loss_plot, + update='append' + ) + +device = 'cuda' if torch.cuda.is_available() else 'cpu' + +torch.manual_seed(777) +if device =='cuda': + torch.cuda.manual_seed_all(777) +``` + +์ €๋ฒˆ์— ๋‹ค๋ค˜์—ˆ๋˜ [visdom](/posts/dlZeroToAll-PyTorch-10-3/)์„ ์ด์šฉํ•˜์—ฌ ์‹œ๊ฐํ™”๋ฅผ ํ•˜๋ฉด์„œ ํ•™์Šต์„ ์ง„ํ–‰ํ•œ๋‹ค. + +### Data + +๋ฐ์ดํ„ฐ๋Š” CIFAR10๋ฅผ ์ด์šฉํ•˜์—ฌ ํ•™์Šต์„ ์ง„ํ–‰ํ•œ๋‹ค. + +```py +transform = transforms.Compose( + [transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) + +trainset = torchvision.datasets.CIFAR10(root='./cifar10', train=True, + download=True, transform=transform) +trainloader = torch.utils.data.DataLoader(trainset, batch_size=512, + shuffle=True, num_workers=0) + +testset = torchvision.datasets.CIFAR10(root='./cifar10', train=False, + download=True, transform=transform) + +testloader = torch.utils.data.DataLoader(testset, batch_size=4, + shuffle=False, num_workers=0) + +classes = ('plane', 'car', 'bird', 'cat', + 'deer', 'dog', 'frog', 'horse', 'ship', 'truck') +``` + +๋ฐ์ดํ„ฐ์…‹๋“ค์„ ๋ถˆ๋Ÿฌ `DataLoader`๋ฅผ ํ†ตํ•ด ํ•™์Šตํ•˜๊ธฐ ์šฉ์ดํ•˜๊ฒŒ ๋งŒ๋“ค์–ด์ค€๋‹ค. + +```py +import matplotlib.pyplot as plt +import numpy as np + +# get some random training images +dataiter = iter(trainloader) +images, labels = dataiter.next() +vis.images(images/2 + 0.5) + +# print labels +print(' '.join('%5s' % classes[labels[j]] for j in range(4))) + +'''output +truck dog horse truck +''' +``` + +![](/posting_imgs/lab10-5-3.png) + +class์™€ ์ด๋ฏธ์ง€๊ฐ€ ์ž˜ ๋ฐ›์•„์ง„ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Model + +```py +import torchvision.models.vgg as vgg + +cfg = [32,32,'M', 64,64,128,128,128,'M',256,256,256,512,512,512,'M'] #13 + 3 =vgg16 + +class VGG(nn.Module): + + def __init__(self, features, num_classes=1000, init_weights=True): + super(VGG, self).__init__() + self.features = features + #self.avgpool = nn.AdaptiveAvgPool2d((7, 7)) -> ๊ตณ์ด ์“ธ ํ•„์š”๊ฐ€ ์—†์Œ + self.classifier = nn.Sequential( + nn.Linear(512 * 4 * 4, 4096), + nn.ReLU(True), + nn.Dropout(), + nn.Linear(4096, 4096), + nn.ReLU(True), + nn.Dropout(), + nn.Linear(4096, num_classes), + ) + if init_weights: + self._initialize_weights() + + def forward(self, x): + x = self.features(x) + #x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.classifier(x) + return x + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) + +vgg16= VGG(vgg.make_layers(cfg),10,True).to(device) +``` + +์•ž์„œ ์ •์˜ํ–ˆ๋˜ VGG16์€ `'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M']`์˜ ํ˜•ํƒœ์˜€๋Š”๋ฐ ์œ„์˜ ๋ชจ๋ธ์€ ์กฐ๊ธˆ ๋‹ฌ๋ผ๋ณด์ธ๋‹ค. +์ด๊ฑด CIFAR10์˜ ์ด๋ฏธ์ง€๊ฐ€ 32x32๋กœ imgagenet์˜ ๊ฒƒ๋ณด๋‹ค ์ž‘๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. ๊ธฐ์กด๋Œ€๋กœ pooling์„ ๋ชจ๋‘ ์ง„ํ–‰ํ•˜๋ฉด ๋ฐ์ดํ„ฐ์˜ ํฌ๊ธฐ๊ฐ€ ๋„ˆ๋ฌด ์ž‘์•„์ง€๋ฏ€๋กœ ๊ทธ๊ฒƒ์„ ๋ฐฉ์ง€ํ•˜๊ธฐ ์œ„ํ•จ์ด๋‹ค. 
+ +๊ฐ™์€ ์ด์œ ๋กœ `avgpool` layer๋„ ๋น ์กŒ๋Š”๋ฐ ๊ธฐ์กด์˜ max pooling๋งŒ ํ•ด๋„ ์ด๋ฏธ 7x7๋ณด๋‹ค ํฌ๊ธฐ๊ฐ€ ์ž‘๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +๋‚˜๋จธ์ง€๋Š” ๋ชจ๋‘ ๊ฐ™์€ ๋ฐฉ์‹์œผ๋กœ ์ž‘์„ฑ๋˜์—ˆ๋‹ค. + +```py +a=torch.Tensor(1,3,32,32).to(device) +out = vgg16(a) +print(out) + +'''output +tensor([[ 0.0125, -0.0020, -0.0270, 0.0210, 0.0100, 0.0126, -0.0009, 0.0242, + -0.0099, 0.0185]], device='cuda:0', grad_fn=) +''' +``` + +testํ–ˆ์„ ๋•Œ๋„ ๋ฌธ์ œ๊ฐ€ ์—†์œผ๋ฏ€๋กœ ์‚ฌ์šฉํ•  ์ค€๋น„๊ฐ€ ๋ชจ๋‘ ๋˜์—ˆ๋‹ค. + +### Optimizer and Loss + +```py +criterion = nn.CrossEntropyLoss().to(device) +optimizer = torch.optim.SGD(vgg16.parameters(), lr = 0.005,momentum=0.9) + +# ํ•™์Šต์ด ์ง„ํ–‰๋จ์— ๋”ฐ๋ผ lr ์กฐ์ ˆ +lr_sche = optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.9) # optimizer์˜ step์ด 5๋ฒˆ ์ง„ํ–‰๋  ๋•Œ๋งˆ๋‹ค gamma๋งŒํผ ๊ณฑํ•จ +``` + +๊ธฐ์กด๊ณผ ๋‹ค๋ฅธ ์ ์€ ํ•™์Šต๋œ ์ •๋„์— ๋”ฐ๋ผ learning rate๋ฅผ ์ค„์ด๋Š” ์ฝ”๋“œ๊ฐ€ ์ถ”๊ฐ€๋˜์—ˆ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +### Train + +```py +print(len(trainloader)) +epochs = 50 + +for epoch in range(epochs): # loop over the dataset multiple times + running_loss = 0.0 + lr_sche.step() + for i, data in enumerate(trainloader, 0): + # get the inputs + inputs, labels = data + inputs = inputs.to(device) + labels = labels.to(device) + + # zero the parameter gradients + optimizer.zero_grad() + + # forward + backward + optimize + outputs = vgg16(inputs) + loss = criterion(outputs, labels) + loss.backward() + optimizer.step() + + # print statistics + running_loss += loss.item() + if i % 30 == 29: # print every 30 mini-batches + loss_tracker(loss_plt, torch.Tensor([running_loss/30]), torch.Tensor([i + epoch*len(trainloader) ])) + print('[%d, %5d] loss: %.3f' % + (epoch + 1, i + 1, running_loss / 30)) + running_loss = 0.0 + + +print('Finished Training') + +'''output +98 +[1, 30] loss: 2.302 +[1, 60] loss: 2.299 +[1, 90] loss: 2.284 +[2, 30] loss: 2.208 +[2, 60] loss: 2.128 +[2, 90] loss: 2.068 +[3, 30] loss: 1.974 +[3, 60] loss: 1.856 +[3, 90] loss: 1.793 +[4, 30] loss: 1.727 +[4, 60] loss: 1.678 +[4, 90] loss: 1.626 +[5, 30] loss: 1.571 +[5, 60] loss: 1.529 +[5, 90] loss: 1.513 +[6, 30] loss: 1.487 +[6, 60] loss: 1.452 +[6, 90] loss: 1.429 +[7, 30] loss: 1.387 +[7, 60] loss: 1.363 +[7, 90] loss: 1.333 +[8, 30] loss: 1.314 +[8, 60] loss: 1.284 +[8, 90] loss: 1.248 +... +[50, 30] loss: 0.034 +[50, 60] loss: 0.030 +[50, 90] loss: 0.030 +Finished Training +''' +``` + +![](/posting_imgs/lab10-5-4.png) + +`lr_sche.step()`์ด ์ถ”๊ฐ€๋œ ๊ฒƒ ๋ง๊ณ  ํฌ๊ฒŒ ๋‹ค๋ฅธ ์ ์€ ์—†๋‹ค. + +### Test + +```py +correct = 0 +total = 0 + +with torch.no_grad(): + for data in testloader: + images, labels = data + images = images.to(device) + labels = labels.to(device) + outputs = vgg16(images) + + _, predicted = torch.max(outputs.data, 1) + + total += labels.size(0) + + correct += (predicted == labels).sum().item() + +print('Accuracy of the network on the 10000 test images: %d %%' % ( + 100 * correct / total)) + +Accuracy of the network on the 10000 test images: 75 % +``` + +์ •ํ™•๋„๋Š” 75%๋กœ ๋‚˜์™”๋‹ค. 
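전체 정확도만으로는 어떤 class가 특히 어려운지 알기 어렵다. 아래는 위에서 정의한 `vgg16`, `testloader`, `classes`를 그대로 사용해 class별 정확도를 추가로 확인해 보는 스케치이다.

```py
# class별 정확도를 따로 집계해 보는 스케치
class_correct = [0] * 10
class_total = [0] * 10

with torch.no_grad():
    for images, labels in testloader:
        images = images.to(device)
        labels = labels.to(device)

        _, predicted = torch.max(vgg16(images), 1)

        for label, pred in zip(labels, predicted):
            if label == pred:
                class_correct[label.item()] += 1
            class_total[label.item()] += 1

for i in range(10):
    print('Accuracy of %5s : %2d %%' % (classes[i], 100 * class_correct[i] / class_total[i]))
```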
\ No newline at end of file diff --git a/_posts/2022-05-27-dlZeroToAll-PyTorch-10-6.markdown b/_posts/2022-05-27-dlZeroToAll-PyTorch-10-6.markdown new file mode 100644 index 00000000000..d87bb0f1b5c --- /dev/null +++ b/_posts/2022-05-27-dlZeroToAll-PyTorch-10-6.markdown @@ -0,0 +1,520 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab10-6: ResNet" +author: Kwon +date: 2022-05-27T00:00:00 +0900 +categories: [pytorch, study] +tags: [cnn, resnet, skip-connection] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab10-6: Advence CNN(ResNet) ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Plain Network + +Plain network๋Š” skip connection์„ ์‚ฌ์šฉํ•˜์ง€ ์•Š์€ ์ผ๋ฐ˜์ ์ธ CNN ์‹ ๊ฒฝ๋ง์„ ์˜๋ฏธํ•œ๋‹ค. +์ด๋Ÿฌํ•œ plain net์ด ๊นŠ์–ด์ง€๋ฉด ๊นŠ์–ด์งˆ์ˆ˜๋ก backpropagation์„ ํ•  ๋•Œ ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค์ด๋‚˜ ํญ๋ฐœ์ด ๋ฐœ์ƒํ•  ํ™•๋ฅ ์ด ๋†’์•„์ง„๋‹ค. + +![20-layer vs. 56-layer plain network](/posting_imgs/lab10-6-1.png) + +์œ„ ๊ทธ๋ž˜ํ”„์—์„œ๋„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋“ฏ์ด plain network๋ฅผ ๊นŠ๊ฒŒ ์Œ“์€ ๊ฒƒ์ด ์˜คํžˆ๋ ค ํ•™์Šต์ด ์ž˜ ์•ˆ ๋œ ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +์ด๋Š” ๋„ˆ๋ฌด ๊นŠ์€ plain network์—์„œ ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค ํ˜น์€ ํญ๋ฐœ์ด ๋ฐœ์ƒํ•˜์—ฌ ์›๋ž˜ ์›ํ•˜๋˜ ๋ฐฉํ–ฅ์œผ๋กœ ํ•™์Šต์ด ์•ˆ๋˜์—ˆ๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +*** + +## Identity Mapping + +Classification problem์—์„œ ๋ชจ๋ธ์ด ์™„๋ฒฝํ•˜๊ฒŒ ํ•™์Šตํ•˜์˜€๋‹ค๋ฉด input($x$)๊ณผ output(y)์˜ ์˜๋ฏธ๋Š” ๊ฐ™์•„์•ผ ํ•œ๋‹ค. (๊ฐ•์•„์ง€ ์ด๋ฏธ์ง€๋ฉด ๊ฐ•์•„์ง€ categry) +์ด ๋ง์€ $H(x)$๊ฐ€ $x$๊ฐ€ ๋˜๋„๋ก ํ•™์Šตํ•˜๋ฉด ๋œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. ์ฆ‰, $H(x)=x$๊ฐ€ ๋˜๋„๋ก ํ•™์Šต์„ ํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋ฉฐ ์ด๋Š” ์ผ์ข…์˜ **ํ•ญ๋“ฑํ•จ์ˆ˜(identity function)**์ด๋ฏ€๋กœ, +Classification problem์˜ model์„ ์ตœ์ ํ™” ํ•œ๋‹ค๋Š” ๊ฒƒ์€ identity mapping์„ ํ•™์Šตํ•˜๋Š” ๊ฒƒ๊ณผ ๊ฐ™์•„์ง„๋‹ค. + +*** + +## ResNet + +ResNet์€ skip connection์„ ์ ์šฉํ•œ network๋กœ ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค์„ ํ•ด๊ฒฐํ•˜๋ฉด์„œ layer๋ฅผ ๋” ๊นŠ๊ฒŒ ์Œ“์•„ ์„ฑ๋Šฅ์ด ์ข‹์€ ๋ชจ๋ธ์„ ๊ตฌ์„ฑํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Skip(Shortcut) Connection + +๊ธฐ์กด์˜ network์—์„œ๋Š” input ($x$)๋ฅผ target ($y$)๋กœ mapping(identity mapping)ํ•˜๋Š” $H(x)$๋ฅผ ์–ป๋Š” ๊ฒƒ์ด ๋ชฉ์ ์ด์—ˆ๋‹ค. +ํ•˜์ง€๋งŒ identity mapping์„ ํ†ตํ•ด ํ•™์Šต์„ ์ง„ํ–‰ํ•ด๋„ layer๊ฐ€ ์Œ“์ผ์ˆ˜๋ก ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค์ด ๋ฐœ์ƒํ•˜๋Š” ๊ฒƒ์€ ์–ด์ฉ” ์ˆ˜ ์—†๋‹ค. + +์ด๋ฅผ ํ•ด๊ฒฐํ•˜๊ธฐ์œ„ํ•ด ์ œ์•ˆํ•œ ๋ฐฉ๋ฒ•๋ก ์ด Residual learning์ด๋‹ค. ์ด๋Š” $H(x)$๋ฅผ ์ง์ ‘ ํ•™์Šตํ•˜๋Š” ๊ฒƒ์ด ์•„๋‹Œ **์ž”์ฐจ(residual)**, $F(x) = H(x)-x$๋ฅผ ์ตœ์†Œํ™”ํ•˜๋Š” ๊ฒƒ์„ ๋ชฉํ‘œ๋กœ ํ•˜๋Š” ๋ฐฉ๋ฒ•์ด๋‹ค. +์ด ์ž”์ฐจ $F(x)$๋ฅผ ์ตœ์†Œํ™” ํ•˜๋ฉด $H(x) = x$์— ๊ฐ€๊นŒ์šด ์ด์ƒ์ ์ธ ๋ชจ๋ธ์„ ์ฐพ์„ ์ˆ˜ ์žˆ๋‹ค๋Š” ๋ฐœ์ƒ์ด๋‹ค. + +์ด ๋ฐฉ๋ฒ•๋ก ์ด ์ œ์•ˆ๋œ ์ „์ œ๋Š” **residual mapping์ด ๊ทธ๋ƒฅ identity mappingํ•˜๋Š” ๋ฐฉ์‹๋ณด๋‹ค ์ตœ์ ํ™”ํ•˜๊ธฐ ์‰ฝ๋‹ค**์ด๋‹ค. +์ฆ‰, ์ง์ ‘ $H(x)=x$๋ฅผ ๋ชฉ์ ์œผ๋กœ ํ•™์Šตํ•˜๋Š” ๊ฒƒ ๋ณด๋‹ค๋Š” ํ˜„์žฌ block์˜ input(์ด์ „ block์˜ output)์˜ ์ •๋ณด($x$, identity)๋ฅผ ์ง€๋‹Œ ์ฑ„ ์ถ”๊ฐ€์ ์œผ๋กœ ํ•™์Šตํ•˜๋Š” ๊ฒƒ์ด ๋” ์‰ฌ์šด ํ•™์Šต์ด ๋œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +๋˜ํ•œ, output์— $x$๋ฅผ ๋”ํ•ด์ฃผ๊ฒŒ ๋˜๋ฉด ๋ฏธ๋ถ„์„ ํ•ด๋„ $x$์˜ ๋ฏธ๋ถ„๊ฐ’์€ 1์ด๊ธฐ ๋•Œ๋ฌธ์— ๊ฐ layer๋“ค์€ ์ตœ์†Œ 1์˜ ๋ฏธ๋ถ„๊ฐ’์„ ์ง€๋‹ˆ๊ฒŒ ๋˜์–ด ์•ž์„œ ๋ฌธ์ œ์˜€๋˜ ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค ๋ฌธ์ œ๊ฐ€ ํ•ด๊ฒฐ๋œ๋‹ค. + +์ด๋Ÿฐ network๋ฅผ ๊ตฌ์„ฑํ•˜๊ธฐ ์œ„ํ•ด ์•„๋ž˜์˜ ๊ตฌ์กฐ์™€ ๊ฐ™์ด ์—ฌ๋Ÿฌ layer๋ฅผ ๊ฑด๋„ˆ๋›ฐ์–ด(skip) ํ•œ layer์˜ output์„ ๋‹ค์Œ layer์˜ input์— ๋”ํ•ด์ฃผ๋Š” ๊ตฌ์กฐ๋ฅผ ๋งŒ๋“ค์—ˆ๋‹ค. 
+identity๊ฐ€ ๋‹ค์Œ output์œผ๋กœ ๊ฐ€๋Š” ์ง€๋ฆ„๊ธธ(shortcut)์„ ๋งŒ๋“ค์–ด ์ฃผ์—ˆ๋‹ค๊ณ  ํ•ด์„œ **shortcut connection**์ด๋ผ๊ณ ๋„ ํ•œ๋‹ค. + +![Skip Connection(Residual learning)](/posting_imgs/lab10-6-2.png) + +์ด๋ ‡๊ฒŒ skip connection์ด ์ ์šฉ๋œ ๋ถ€๋ถ„์„ **residual block**์ด๋ผ๊ณ  ํ•œ๋‹ค. + +### Bottleneck Block + +Residual block์• ์„œ ์ค‘๊ฐ„ layer๋ฅผ 1x1 -> 3x3 -> 1x1 ์˜ bottleneck ๊ตฌ์กฐ๋ฅผ ๋งŒ๋“ค์–ด demension redeuction์„ ํ†ตํ•ด ์—ฐ์‚ฐ ์‹œ๊ฐ„์„ ์ค„์ธ ๊ตฌ์กฐ์ด๋‹ค. + +![](/posting_imgs/lab10-6-3.png" description="Bottleneck Block" %} + +ResNet 18/34์—์„œ๋Š” ์ผ๋ฐ˜์ ์ธ residual block์„ ์‚ฌ์šฉํ•˜๊ณ  ResNet 50/101/152์—์„œ๋Š” bottleneck block์„ ์‚ฌ์šฉํ•œ๋‹ค๊ณ  ํ•œ๋‹ค. + +### ResNet-34 Architecture + +ResNet์€ ์•„๋ž˜์™€ ๊ฐ™์ด 5๊ฐœ์˜ ๊นŠ์ด๋ฅผ ๊ฐ€์ง„ ๊ตฌ์กฐ๊ฐ€ ์กด์žฌํ•œ๋‹ค. + +![ResNet Models](/posting_imgs/lab10-6-4.png) + +๊ณ ์ •๋œ layer๋“ค์„ ์ œ์™ธํ•˜๋ฉด ํฌ๊ฒŒ 4๊ฐœ์˜ layer๊ฐ€ ์žˆ๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋Š”๋ฐ, VGG์™€ ๋น„์Šทํ•˜๊ฒŒ 3x3 conv์— padding 1์„ ์ ์šฉํ•˜์—ฌ ํ•˜๋‚˜์˜ ํฐ layer์•ˆ์—์„œ๋Š” ourput size๊ฐ€ ๊ณ ์ •๋˜๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +![์œ„๋ถ€ํ„ฐ ์ˆœ์„œ๋Œ€๋กœ Resnet-34, 34-layer plain, VGG-19](/posting_imgs/lab10-6-5.png) + +๊ทธ๋ฆผ์œผ๋กœ ๋ณด๋ฉด ์œ„์™€ ๊ฐ™์€ ๊ตฌ์กฐ๋ฅผ ์ง€๋‹ˆ๊ณ  ์žˆ๋‹ค. VGG ๊ตฌ์กฐ๋ณด๋‹ค ํ™•์‹คํžˆ ๊นŠ์€ ๊ตฌ์กฐ์ธ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +*** + +## Code with ResNet + +PyTorch์˜ Resnet์„ ๋œฏ์–ด๋ณด์ž. + +### BasicBlock + +๋จผ์ € residual block์ด๋‹ค. + +```py +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + # x.shape = 3, 64, 64 + identity = x + # identity = 3, 64, 64 + out = self.conv1(x) # 3x3 stride = stride = 2 + out = self.bn1(out) + out = self.relu(out) + # out.shape = 3, 32, 32 + out = self.conv2(out) # 3x3 stride = 1 + # out.shape = 3, 32, 32 + out = self.bn2(out) + # out.shape = 3, 32, 32 + # identity = 3, 64, 64 -> ๋ง์…ˆ ๋ถˆ๊ฐ€ + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out +``` + +์•ž์„œ ์‚ดํŽด๋ดค๋˜ residual block์˜ ๊ตฌ์กฐ์™€ ๋‹ค๋ฅด์ง€ ์•Š์€ ๋ชจ์Šต์ด๋‹ค. 2๊ฐœ์˜ 3x3 conv layer๋ฅผ ํ†ต๊ณผํ•˜๊ณ  ๋งˆ์ง€๋ง‰์— ์ž…๋ ฅ์ธ identity๋ฅผ ๋”ํ•ด์ค€ ํ›„ output์„ ๋‚ด์–ด์ฃผ๋Š” ํ˜•ํƒœ์ด๋‹ค. + +๋‹ค๋งŒ ๋‹ค๋ฅธ ์ ์ด ์žˆ๋‹ค๋ฉด downsample์ด ์žˆ๋Š” ๊ฒƒ์ธ๋ฐ, ์ด๋Š” ์ฃผ์„์œผ๋กœ ์ฒ˜๋ฆฌ๋œ ์˜ˆ์‹œ๋กœ ์„ค๋ช…์ด ๋œ๋‹ค. ๋งŒ์•ฝ 3x64x64์˜ ์ž…๋ ฅ์ด ๋“ค์–ด์˜ค๊ณ  `conv1`์˜ stride๋ฅผ 2๋กœ ํ•œ๋‹ค๋ฉด identity๋ฅผ ๋”ํ•˜๊ธฐ ์ „์˜ `out.shape`์ด 3x32x32๋กœ ๊ธฐ์กด๊ณผ ๋‹ฌ๋ผ์ง€๊ฒŒ ๋˜์–ด identity์™€์˜ ๋ง์…ˆ์ด ๋ถˆ๊ฐ€๋Šฅํ•ด์ง„๋‹ค. +์ด ๊ฒฝ์šฐ identity๋ฅผ downsampleํ•˜์—ฌ shape์„ ๋งž์ถฐ์ค€๋‹ค. ๊ทธ๋ž˜์„œ ๋งŒ์•ฝ stride์— ๋”ฐ๋ผ out์˜ shape์ด ๋‹ฌ๋ผ์ง€๊ฒŒ ๋˜๋Š” ๊ฒฝ์šฐ downsample ์˜ต์…˜์„ ์ฃผ์–ด shape์„ ๋งž์ถฐ์•ผ ํ•œ๋‹ค. 
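위 주석의 상황을 직접 만들어 보면 downsample이 왜 필요한지 쉽게 확인할 수 있다. 아래는 `BasicBlock`에 stride=2와 downsample을 함께 넣어 보는 스케치로, `conv3x3`/`conv1x1`은 바로 다음 절에서 정의되는 헬퍼 함수를 미리 쓴다고 가정한 것이다.

```py
# stride=2인 BasicBlock에서 downsample로 identity의 shape을 맞춰 주는 것을 확인하는 스케치
import torch
import torch.nn as nn

downsample = nn.Sequential(
    conv1x1(64, 64, stride=2),   # identity도 절반 크기로 줄여 out과 더할 수 있게 만든다
    nn.BatchNorm2d(64),
)
block = BasicBlock(64, 64, stride=2, downsample=downsample)

x = torch.randn(1, 64, 56, 56)
print(block(x).shape)            # torch.Size([1, 64, 28, 28])
```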
+ +### Bottleneck + +```py +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=1, bias=False) + + +def conv1x1(in_planes, out_planes, stride=1): + """1x1 convolution""" + return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False) + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = conv1x1(inplanes, planes) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = conv3x3(planes, planes, stride) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = conv1x1(planes, planes * self.expansion) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + identity = x + + out = self.conv1(x) # 1x1 stride = 1 + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) # 3x3 stride = stride + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) # 1x1 stride = 1 + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out +``` + +3x3 / 1x1 conv๋ฅผ ๋”ฐ๋กœ ์ •์˜ํ•˜์—ฌ ๋ชจ๋ธ์— ์‚ฌ์šฉํ•˜๊ณ  ์žˆ๋Š” ๋ชจ์Šต์ด๋‹ค. ์ด ๋•Œ๋„ ๊ฐ™์€ ์ด์œ ๋กœ downsample ์˜ต์…˜์ด ์žˆ๋‹ค. + +### ResNet + +์œ„์˜ block class๋“ค์„ ์ด์šฉํ•ด network๋ฅผ ๊ตฌ์„ฑํ•  ์ฐจ๋ก€์ด๋‹ค. + +#### _make_layer + +๋จผ์ € layer๋ฅผ ๋งŒ๋“ค์–ด์ฃผ๋Š” ํ•จ์ˆ˜๋ถ€ํ„ฐ ๋ณด์ž. + +```py + # self.inplanes = 64 + # self.layer1 = self._make_layer(block=Bottleneck, 64, layers[0]=3) + def _make_layer(self, block, planes, blocks, stride=1): + + downsample = None + + # identity ๊ฐ’์„ ๋‚ฎ์ถฐ์„œ shape์„ ๋งž์ถฐ์ฃผ๊ธฐ ์œ„ํ•จ. channel๋„ ๋งž์ถฐ์ฃผ๊ธฐ. + if stride != 1 or self.inplanes != planes * block.expansion: # 64 != 64 * 4 + + downsample = nn.Sequential( + conv1x1(self.inplanes, planes * block.expansion, stride), #conv1x1(256, 512, 2) #conv1x1(64, 256, 2) + nn.BatchNorm2d(planes * block.expansion), #batchnrom2d(512) #batchnrom2d(256) + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + # layers.append(Bottleneck(64, 64, 1, downsample)) + + self.inplanes = planes * block.expansion #self.inplanes = 128 * 4 + + for _ in range(1, blocks): + layers.append(block(self.inplanes, planes)) # * 3 + + return nn.Sequential(*layers) +``` + +stride๊ฐ€ 1์ด ์•„๋‹ˆ๊ฑฐ๋‚˜ input๊ณผ output์˜ shape์ด ๋งž์ง€ ์•Š์„ ๊ฒฝ์šฐ downsample์„ `conv1x1`์„ ์ด์šฉํ•˜์—ฌ shape์„ ๋งž์ถœ ์ˆ˜ ์žˆ๊ฒŒ ์„ค์ •ํ•ด์ค€๋‹ค. + +๋‹ค์Œ๋ถ€ํ„ฐ๋Š” layer์˜ ํฌ๊ธฐ์— ๋งž๊ฒŒ layer๋ฅผ block ๊ฐœ์ˆ˜ ๋งŒํผ ๋งŒ๋“ค์–ด ์Œ“์€ ํ›„์— ๋ฐ˜ํ™˜ํ•œ๋‹ค. ์ฃผ์„์˜ ์ž…๋ ฅ ์—์‹œ๋ฅผ ํ†ตํ•ด ๋”ฐ๋ผ๊ฐ€ ๋ณด๋ฉด ์ดํ•ด์— ์กฐ๊ธˆ ๋„์›€์ด ๋  ๊ฒƒ ๊ฐ™๋‹ค. 
+ +### __init__ + +```py + # model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs) => resnet 50 + def __init__(self, block, layers, num_classes=1000, zero_init_residual=False): + super(ResNet, self).__init__() + + self.inplanes = 64 + + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False) + + # outputs = self.conv1(inputs) + # outputs.shape = 64, 112, 112 + + self.bn1 = nn.BatchNorm2d(64) + self.relu = nn.ReLU(inplace=True) + + # inputs = 64, 112, 112 + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + # output = 64, 56, 56 + + self.layer1 = self._make_layer(block, 64, layers[0]'''3''') + self.layer2 = self._make_layer(block, 128, layers[1]'''4''', stride=2) + self.layer3 = self._make_layer(block, 256, layers[2]'''6''', stride=2) + self.layer4 = self._make_layer(block, 512, layers[3]'''3''', stride=2) + + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): # weight init + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + # Zero-initialize the last BN in each residual branch, + # so that the residual branch starts with zeros, and each residual block behaves like an identity. + # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677 + if zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck): + nn.init.constant_(m.bn3.weight, 0) + elif isinstance(m, BasicBlock): + nn.init.constant_(m.bn2.weight, 0) +``` + +์ด์ „์— ๋งŒ๋“ค์—ˆ๋˜ ํ•จ์ˆ˜๋“ค์„ ๋ชจ๋‘ ์‚ฌ์šฉํ•˜์—ฌ network๋ฅผ ๋งŒ๋“ ๋‹ค. ๋จผ์ € ๊ณ ์ •์ ์œผ๋กœ ๋“ค์–ด๊ฐˆ 7x7 conv์™€ maxpooling layer๋ฅผ ๋งŒ๋“ค์–ด์ค€๋‹ค. + +๋‹ค์Œ์œผ๋กœ๋Š” `_make_layer`๋ฅผ ํ†ตํ•ด ์ž…๋ ฅํ•œ layer ๊ฐœ์ˆ˜์— ๋งž๊ฒŒ layer๋ฅผ ์ƒ์„ฑํ•œ๋‹ค. ๋งˆ์ง€๋ง‰์œผ๋กœ 1x1๋กœ average pooling์„ ํ•˜๊ณ  class ๊ฐœ์ˆ˜์— ๋งž์ถฐ fc layer๋ฅผ ํ†ต๊ณผ์‹œํ‚ค๋ฉด ๋ชจ๋“  ํ•™์Šต์ด ๋๋‚˜๊ฒŒ ๋œ๋‹ค. + +๊ฐ€์ค‘์น˜์˜ ์ดˆ๊ธฐํ™”๋Š” conv์˜ ๊ฒฝ์šฐ `'fan_out'` mode์˜ `kaiming_normal_`๋ฅผ ์‚ฌ์šฉํ•˜๊ณ , bn(batch normalize)์˜ ๊ฒฝ์šฐ wieght๋ฅผ 1, bias๋ฅผ 0์œผ๋กœ ์ดˆ๊ธฐํ™”ํ•œ๋‹ค. +`zero_init_residual`์˜ต์…˜์„ ์ฃผ๋ฉด ํŠน์ • layer์˜ wieght๋ฅผ 0์œผ๋กœ ์ดˆ๊ธฐํ™”ํ•˜๋Š”๋ฐ ์ด๋Š” ํ•ด๋‹น ๋…ผ๋ฌธ์—์„œ ์ด ์‹œํ–‰์„ ์ ์šฉํ•˜์˜€๋”๋‹ˆ ์„ฑ๋Šฅ์ด 0.2~0.3% ์˜ฌ๋ž๋‹ค๊ณ  ํ•ด์„œ ๋“ค์–ด๊ฐ€ ์žˆ๋Š” ์˜ต์…˜์ด๋ผ๊ณ  ํ•œ๋‹ค. + +### forward + +```py + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + + return x +``` + +`forward`์—์„œ๋Š” ์ง€๊ธˆ๊นŒ์ง€ ๋งŒ๋“ค์—ˆ๋˜ ๋ชจ๋ธ๋“ค์„ ์ˆœ์„œ๋Œ€๋กœ ์ด์–ด์„œ ํ•™์Šต๋˜๋„๋ก ํ•œ๋‹ค. + +### ResNet Models + +```py +def resnet18(pretrained=False, **kwargs): + model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs) #=> 2*(2+2+2+2) +1(conv1) +1(fc) = 16 +2 =resnet 18 + return model + +def resnet50(pretrained=False, **kwargs): + model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs) #=> 3*(3+4+6+3) +(conv1) +1(fc) = 48 +2 = 50 + return model + +def resnet152(pretrained=False, **kwargs): + model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs) # 3*(3+8+36+3) +2 = 150+2 = resnet152 + return mode +``` + +์œ„์™€ ๊ฐ™์€ ๋ฐฉ๋ฒ•์œผ๋กœ ์—ฌ๋Ÿฌ resnet์„ ๋งŒ๋“ค์–ด ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. 
+ +ํ•˜์ง€๋งŒ ์ด๋ฏธ module๋กœ ๋‹ค ๋งŒ๋“ค์–ด์ ธ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ์•„๋ž˜์™€ ๊ฐ™์ด ๋งŒ๋“ค์–ด์ง„ ๊ฒƒ์„ ์‚ฌ์šฉํ•ด์„œ model์„ ์ƒ์„ฑํ•ด๋„ ์ •์ƒ์ ์œผ๋กœ ์ƒ์„ฑ๋˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +```py +import torchvision.models.resnet as resnet + +res = resnet.resnet50() +res + +'''ourput +ResNet( + (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False) + (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (relu): ReLU(inplace=True) + (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False) + (layer1): Sequential( + (0): Bottleneck( + (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False) + (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) + (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (relu): ReLU(inplace=True) + (downsample): Sequential( + (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) + (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + ) + ) + (1): Bottleneck( + (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False) + (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) +... + ) + ) + (avgpool): AdaptiveAvgPool2d(output_size=(1, 1)) + (fc): Linear(in_features=2048, out_features=1000, bias=True) +) +''' +``` + +*** + +## Train with ResNet + +[VGG](/posts/dlZeroToAll-PyTorch-10-5/)๋ฅผ ํ†ตํ•ด ํ•™์Šตํ–ˆ์„ ๋•Œ์™€ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ CIFAR10 data๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํ•™์Šต์„ ์ง„ํ–‰ํ•œ๋‹ค. + +### Data + +๋Œ€๋ถ€๋ถ„ VGG ํ•™์Šตํ•  ๋•Œ์™€ ๊ฐ™๋‹ค. ๋‹ค๋งŒ ๋‹ค๋ฅธ ๋ถ€๋ถ„์€ ์‹ค์ œ data์˜ ํ‰๊ท ๊ณผ ๋ถ„์‚ฐ์„ ๊ตฌํ•ด์„œ normalize๋ฅผ ์ง„ํ–‰ํ•œ๋‹ค. 
+ +```py +transform = transforms.Compose([ + transforms.ToTensor() +]) + +trainset = torchvision.datasets.CIFAR10(root='./cifar10', train=True, download=True, transform=transform) + +print(trainset.data.shape) + +# ๊ฐ ์ถ•๋งˆ๋‹ค ๊ตฌํ•ด์„œ normalization +train_data_mean = trainset.data.mean( axis=(0,1,2) ) +train_data_std = trainset.data.std( axis=(0,1,2) ) + + +print(train_data_mean) +print(train_data_std) + +train_data_mean = train_data_mean / 255 +train_data_std = train_data_std / 255 + +print(train_data_mean) +print(train_data_std) + +transform_train = transforms.Compose([ + transforms.RandomCrop(32, padding=4), + transforms.ToTensor(), + transforms.Normalize(train_data_mean, train_data_std) +]) + +transform_test = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(train_data_mean, train_data_std) +]) + +trainset = torchvision.datasets.CIFAR10(root='./cifar10', train=True, + download=True, transform=transform_train) +trainloader = torch.utils.data.DataLoader(trainset, batch_size=256, + shuffle=True, num_workers=0) + +testset = torchvision.datasets.CIFAR10(root='./cifar10', train=False, + download=True, transform=transform_test) + +testloader = torch.utils.data.DataLoader(testset, batch_size=256, + shuffle=False, num_workers=0) + +classes = ('plane', 'car', 'bird', 'cat', + 'deer', 'dog', 'frog', 'horse', 'ship', 'truck') +``` + +### Model + +์ด๋ฒˆ ๊ฐ•์˜์—์„œ๋Š” pyTorch์˜ ResNet์„ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ์œ„์—์„œ ์ •์˜ํ•œ class๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํ•™์Šต์„ ์ง„ํ–‰ํ–ˆ๋‹ค. +์œ„์—์„œ ๋งŒ๋“  class์™€ ํ•จ์ˆ˜๋“ค์„ resnet.py์— ์ €์žฅํ•ด์ฃผ๊ณ  importํ•˜์—ฌ ์‚ฌ์šฉํ•œ๋‹ค. + +```py +import resnet/ + +conv1x1=resnet.conv1x1 +Bottleneck = resnet.Bottleneck +BasicBlock= resnet.BasicBlock +``` + +๊ฐ ๋ธ”๋ก๋“ค์„ ๋จผ์ € ์ •์˜ํ•ด์ค€๋‹ค. + +```py +class ResNet(nn.Module): + + def __init__(self, block, layers, num_classes=1000, zero_init_residual=False): + super(ResNet, self).__init__() + self.inplanes = 16 + self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, + bias=False) + self.bn1 = nn.BatchNorm2d(16) + self.relu = nn.ReLU(inplace=True) + #self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) => ์‚ฌ์ด์ฆˆ ์ž‘์•„์„œ ํ•„์š” ์—†์Œ + + self.layer1 = self._make_layer(block, 16, layers[0], stride=1) + self.layer2 = self._make_layer(block, 32, layers[1], stride=1) + self.layer3 = self._make_layer(block, 64, layers[2], stride=2) + self.layer4 = self._make_layer(block, 128, layers[3], stride=2) + + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(128 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + # Zero-initialize the last BN in each residual branch, + # so that the residual branch starts with zeros, and each residual block behaves like an identity. 
+ # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677 + if zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck): + nn.init.constant_(m.bn3.weight, 0) + elif isinstance(m, BasicBlock): + nn.init.constant_(m.bn2.weight, 0) + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + conv1x1(self.inplanes, planes * block.expansion, stride), + nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + #x.shape =[1, 16, 32,32] + x = self.bn1(x) + x = self.relu(x) + #x = self.maxpool(x) + + x = self.layer1(x) + #x.shape =[1, 128, 32,32] + x = self.layer2(x) + #x.shape =[1, 256, 32,32] + x = self.layer3(x) + #x.shape =[1, 512, 16,16] + x = self.layer4(x) + #x.shape =[1, 1024, 8,8] + + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + + return x +``` + +์ •์˜ํ•œ ๋ธ”๋ก๋“ค์„ ์‚ฌ์šฉํ•˜์—ฌ ์•ž์„œ ๊ตฌํ˜„ํ–ˆ๋˜ ๊ฒƒ๊ณผ ๊ฐ™์ด network๋ฅผ ๊ตฌ์„ฑํ•œ๋‹ค. +๋‹ค๋งŒ CIFAR10์˜ ์ด๋ฏธ์ง€ ์‚ฌ์ด์ฆˆ๊ฐ€ imagenet๋ณด๋‹ค ์ž‘๊ธฐ ๋•Œ๋ฌธ์— ๊ทธ์— ๋งž๊ฒŒ ์ฒซ conv์˜ kernel size๋ฅผ 3์œผ๋กœ ์กฐ์ ˆํ•ด์ฃผ๊ณ , pooling์€ ์—†์• ๋ฉฐ, +layer์˜ `inplanes`๋„ (64, 128, 256, 512)์—์„œ (16, 32, 64, 128)๋กœ ๋ฐ”๊ฟ”์ค€๋‹ค. + +์ดํ›„๋กœ ๋‹ค๋ฅธ ์ ์€ ํฌ๊ฒŒ ์—†์–ด visdom์„ ํ†ตํ•œ ๊ฒฐ๊ณผ๋งŒ ํ•œ ๋ฒˆ ํ™•์ธํ•ด ๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์ด 80%์˜ ์ •ํ™•๋„๊นŒ์ง€ ํ•™์Šต์ด ๋œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค.(20 epoch ์ง„ํ–‰) + +![Loss(์ขŒ), Accuracy(์šฐ)](/posting_imgs/lab10-6-6.png) + +#### Image Source + +* 20-layer vs. 56-layer plain network, Residual Block, ResNet Models Table, Resnet-34 Architecture: [https://arxiv.org/pdf/1512.03385.pdf](https://arxiv.org/pdf/1512.03385.pdf) +* Bottleneck Bock: [http://icml.cc/2016/tutorials/icml2016_tutorial_deep_residual_networks_kaiminghe.pdf](http://icml.cc/2016/tutorials/icml2016_tutorial_deep_residual_networks_kaiminghe.pdf) \ No newline at end of file diff --git a/_posts/2022-06-04-dlZeroToAll-PyTorch-11-0.markdown b/_posts/2022-06-04-dlZeroToAll-PyTorch-11-0.markdown new file mode 100644 index 00000000000..9b7d0aeaa82 --- /dev/null +++ b/_posts/2022-06-04-dlZeroToAll-PyTorch-11-0.markdown @@ -0,0 +1,58 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab11-0: RNN intro" +author: Kwon +date: 2022-06-04T00:00:00 +0900 +categories: [pytorch, study] +tags: [rnn] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab11-0: RNN intro ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## RNN(Recurrent Neural Network) + +RNN์€ sequential data๋ฅผ ์ž˜ ํ•™์Šตํ•˜๊ธฐ ์œ„ํ•ด ๊ณ ์•ˆ๋œ ๋ชจ๋ธ์ด๋‹ค. Sequential data๋ž€ ๋‹จ์–ด, ๋ฌธ์žฅ์ด๋‚˜ ์‹œ๊ฒŒ์—ด ๋ฐ์ดํ„ฐ์™€ ๊ฐ™์ด ๋ฐ์ดํ„ฐ์˜ ์ˆœ์„œ๋„ ๋ฐ์ดํ„ฐ์˜ ์ผ๋ถ€์ธ ๋ฐ์ดํ„ฐ๋“ค์„ ๋งํ•œ๋‹ค. +์ด๋Ÿฐ ๋ฐ์ดํ„ฐ๋Š” ์ด์ „ ์ˆœ์„œ์˜ ๋ฐ์ดํ„ฐ๋“ค์„ ์ƒ๊ฐํ•˜์—ฌ ํ•™์Šต์„ ํ•˜๋Š” ๊ฒƒ์ด ๋”ํšจ๊ณผ์ ์ผ ๊ฒƒ์ด๋‹ค. +์ด๋ ‡๊ฒŒ ํ•™์Šตํ•˜๊ธฐ ์œ„ํ•ด RNN์€ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ตฌ์กฐ๋กœ ๊ตฌ์„ฑ๋˜์–ด ์žˆ๋‹ค. + +![RNN์˜ ๊ตฌ์กฐ](/posting_imgs/lab11-0-1.png) + +ํ•œ ๋ชจ๋ธ๋กœ ๊ณ„์† ํ•™์Šตํ•˜์ง€๋งŒ ์ด์ „์˜ output์ด ๋‹ค์Œ์˜ input๊ณผ ํ•จ๊ป˜ ๋“ค์–ด๊ฐ€๋Š” ๊ตฌ์กฐ์ด๋‹ค. 
์ด๋ ‡๊ฒŒ output์„ ๋‹ค์Œ input์œผ๋กœ ๋ณด๋‚ผ ๋•Œ ์ด ๊ฐ’์„ hidden state๋ผ๊ณ  ํ•˜๋ฉฐ, ์ด๋Ÿฐ ์‹์œผ๋กœ ๋ชจ๋ธ์„ ๊ตฌ์„ฑํ•˜๋ฉด ๊ณผ๊ฑฐ์˜ ๋ฐ์ดํ„ฐ๋ฅผ ๋ฐ˜์˜ํ•˜์—ฌ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๊ฒŒ ๋œ๋‹ค. + +์ด๋•Œ RNN ๋‚ด๋ถ€์˜ actiavtion์€ tanh๋‚˜ sigmoid function์„ ์‚ฌ์šฉํ•œ๋‹ค. ์ง€๊ธˆ๊นŒ์ง€ ๋ด์™”๋˜ network๋“ค์€ ๊ธฐ์šธ๊ธฐ ์†Œ์‹ค ๋ฌธ์ œ ๋•Œ๋ฌธ์— ReLU๋ฅผ ์‚ฌ์šฉํ•ด์•ผ ์„ฑ๋Šฅ์ด ๋” ์ข‹์•„์ง„๋‹ค๊ณ  ํ–ˆ๋˜ ๊ฒƒ ๊ฐ™์€๋ฐ RNN์˜ ๋‚ด๋ถ€์—์„œ๋Š” ์™œ ์‚ฌ์šฉํ•˜์ง€ ์•Š๋Š” ๊ฑธ๊นŒ. +๊ทธ ์ด์œ ๋Š” RNN์˜ ๊ตฌ์กฐ ๋•Œ๋ฌธ์ธ๋ฐ, RNN์€ ๊ฐ™์€ layer๋ฅผ ์—ฌ๋Ÿฌ๋ฒˆ ๋ฐ˜๋ณตํ•˜์—ฌ ํ•™์Šตํ•˜๋Š” ๊ตฌ์กฐ๋ฅผ ๊ฐ€์ง„๋‹ค. ์ด๋•Œ ReLU์™€ ๊ฐ™์€ activation์„ ์‚ฌ์šฉํ•˜๋ฉด 1๋ณด๋‹ค ํฐ ์ˆ˜๊ฐ€ ๋ฐ˜๋ณตํ•˜์—ฌ ๊ณฑํ•ด์ง€๊ธฐ ๋•Œ๋ฌธ์— **๊ธฐ์šธ๊ธฐ ํญ๋ฐœ**์ด ๋ฐœ์ƒํ•  ๊ฐ€๋Šฅ์„ฑ์ด ๋งค์šฐ ๋†’๋‹ค. +๊ทธ๋ž˜์„œ RNN์˜ ๋‚ด๋ถ€์—์„œ ๋งŒํผ์€ ์ ˆ๋Œ€๊ฐ’์ด 1์„ ๋„˜์ง€ ์•Š์€ tanh๋‚˜ sigmoid๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ์ด๋‹ค. + +Acivation์„ ํ‘œํ˜„ํ•˜์—ฌ ์กฐ๊ธˆ ๋” ์ž์„ธํžˆ ๋ชจ๋ธ์„ ๋‚˜ํƒ€๋‚ด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ตฌ์กฐ๋ฅผ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/lab11-0-2.png) + +์ผ๋ฐ˜ํ™”ํ•˜์—ฌ ์ˆ˜์‹์œผ๋กœ ๋‚˜ํƒ€๋‚ด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๊ณ  + +\\[h_t=f(h_{t-1}, x_t)\\] + +activation๊ณผ weight๋ฅผ ๋ช…์‹œํ•˜์—ฌ ํ‘œํ˜„ํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +\\[h_t=tanh(W_h h_{t-1}, W_x x_t)\\] + +*** + +## Usages of RNN + +์ด๋Ÿฐ RNN์˜ ๊ตฌ์กฐ๋ฅผ ์‘์šฉํ•˜์—ฌ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ตฌ์กฐ๋“ค๋กœ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. + +![Usages of RNN](/posting_imgs/lab11-0-3.png) + +* **one to many** +: ํ•˜๋‚˜์˜ ์ž…๋ ฅ์„ ๋ฐ›์•„ ์—ฌ๋Ÿฌ ์ถœ๋ ฅ์„ ๋‚ด๋Š” ๊ตฌ์กฐ์ด๋‹ค. ํ•˜๋‚˜์˜ ์ด๋ฏธ์ง€๋ฅผ ๋ฐ›์•„ ๊ทธ์— ๋Œ€ํ•œ ์„ค๋ช…์„ ๋ฌธ์žฅ(์—ฌ๋Ÿฌ๊ฐœ์˜ ๋‹จ์–ด)์œผ๋กœ ์ถœ๋ ฅํ•˜๋Š” ๊ฒƒ์„ ์—๋กœ ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +* **many to one** +: ์—ฌ๋Ÿฌ ์ž…๋ ฅ์„ ๋ฐ›์•„ ํ•˜๋‚˜์˜ ์ถœ๋Ÿญ์„ ๋‚ด๋Š” ๊ตฌ์กฐ์ด๋‹ค. ๋ฌธ์žฅ์„ ์ž…๋ ฅ๋ฐ›์•„ ๊ทธ ๋ฌธ์žฅ์ด ๋‚˜ํƒ€๋‚ด๋Š” ๊ฐ์ •์˜ label์„ ์ถœ๋ ฅํ•˜๋Š” ๊ฒƒ์„ ์˜ˆ๋กœ ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +* **many to many** +: 2๊ฐ€์ง€์˜ ๊ตฌ์กฐ๊ฐ€ ์žˆ๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + - ์ž…๋ ฅ์ด ๋‹ค ๋๋‚˜๋Š” ์ง€์ ๋ถ€ํ„ฐ ์—ฌ๋Ÿฌ ์ถœ๋ ฅ์„ ๋‚ด๋Š” ๊ตฌ์กฐ๋กœ, ๋ฌธ์žฅ์„ ์ž…๋ ฅ๋ฐ›์•„ ๋ฒˆ์—ญํ•˜๋Š” ๋ชจ๋ธ์„ ์˜ˆ๋กœ ๋“ค ์ˆ˜ ์žˆ๋‹ค. ์ด ๊ฒฝ์šฐ ๋ฌธ์žฅ์˜ ์ค‘๊ฐ„์— ๋ฒˆ์—ญ์„ ์ง„ํ–‰ํ•˜๋ฉด ๋‹ค ๋๋‚˜๊ณ  ๋‚˜์„œ ๋ฌธ์žฅ์˜ ์˜๋ฏธ๊ฐ€ ๋‹ฌ๋ผ์งˆ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ๋จผ์ € ์ž…๋ ฅ ๋ฌธ์žฅ์„ ๋‹ค ๋“ฃ๊ณ  ๋ฒˆ์—ญ์„ ์ง„ํ–‰ํ•˜๊ฒŒ ๋œ๋‹ค. + - ์ž…๋ ฅ ํ•˜๋‚˜ํ•˜๋‚˜๋ฅผ ๋ฐ›์œผ๋ฉด์„œ ๊ทธ๋•Œ๋งˆ๋‹ค ๋ชจ๋ธ์˜ ์ถœ๋ ฅ์„ ๋‚ด๋Š” ๊ตฌ์กฐ์ด๋‹ค. ์˜์ƒ์„ ์ฒ˜๋ฆฌํ•  ๋•Œ frame ๋‹จ์œ„์˜ ์ด๋ฏธ์ง€๋กœ ๋‚˜๋ˆ  ์ž…๋ ฅ์„ ๋ฐ›์€ ํ›„ ๊ฐ frame์„ ์ž…๋ ฅ ๋ฐ›์„ ๋•Œ๋งˆ๋‹ค ์ฒ˜๋ฆฌํ•˜๋Š” ๊ฒƒ์„ ์˜ˆ๋กœ ๋“ค ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-06-06-dlZeroToAll-PyTorch-11-1.markdown b/_posts/2022-06-06-dlZeroToAll-PyTorch-11-1.markdown new file mode 100644 index 00000000000..80fec07b48f --- /dev/null +++ b/_posts/2022-06-06-dlZeroToAll-PyTorch-11-1.markdown @@ -0,0 +1,95 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab11-1: RNN Baisics" +author: Kwon +date: 2022-06-06T00:00:00 +0900 +categories: [pytorch, study] +tags: [rnn] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab11-1: RNN Baisics ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## with PyTorch + +PyTorch์—์„œ RNN์€ in/output size๋งŒ ์ž˜ ๋งž์ถฐ์ฃผ๋ฉด ๋ฐ”๋กœ ์‚ฌ์šฉ์ด ๊ฐ€๋Šฅํ•˜๋‹ค. +**"h, e, l, o"** 4๊ฐœ์˜ ์•ŒํŒŒ๋ฒณ์œผ๋กœ ์ด๋ฃจ์–ด์ง„ ๋ฐ์ดํ„ฐ์…‹์„ ํ†ตํ•ด 2์ฐจ์›์˜ output(class๊ฐ€ 2๊ฐœ)์„ ๋‚ด๋Š” RNN์„ ๋งŒ๋“ค์–ด๋ณผ ๊ฒƒ์ด๋‹ค. 
+ +```py +import torch +import numpy as np + +torch.manual_seed(0) + +input_size = 4 +hidden_size = 2 + +# one-hot encoding +h = [1, 0, 0, 0] +e = [0, 1, 0, 0] +l = [0, 0, 1, 0] +o = [0, 0, 0, 1] +input_data_np = np.array([[h, e, l, l, o], [e, o, l, l, l], [l, l, e, e, l]], dtype=np.float32) + +# transform as torch tensor +input_data = torch.Tensor(input_data_np) +``` + +์œ„์™€ ๊ฐ™์ด one-hot encodingํ•˜์—ฌ ๋ฐ์ดํ„ฐ๋ฅผ ๋งŒ๋“ค๋“ค๊ณ  Tensor๋กœ ๋ฐ”๊ฟ”์ค€๋‹ค. +์•ŒํŒŒ๋ฒณ์— ๊ทธ๋ƒฅ ์ˆซ์ž(index)๋ฅผ ๋ถ™์—ฌ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ๊ตณ์ด one-hot encoding์„ ํ•˜๋Š” ์ด์œ ๋Š” ์ˆซ์ž ํฌ๊ธฐ์— ๋”ฐ๋ผ network๊ฐ€ ์˜๋ฏธ๋ฅผ ๋ถ€์—ฌํ•  ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. +์‹ค์ œ๋กœ๋Š” ๊ทธ์ € ์•ŒํŒŒ๋ฒณ์ผ ๋ฟ์ด์ง€๋งŒ ๋” ํฐ ์ˆซ์ž๋ฅผ ํ• ๋‹น ๋ฐ›์€ ์•ŒํŒŒ๋ฒณ์„ ๋” ์ค‘์š”ํ•˜๊ฒŒ ์ƒ๊ฐํ•˜๋ฉด์„œ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +๋‹ค์‹œ ๋Œ์•„์˜ค๋ฉด ์•ŒํŒŒ๋ฒณ์˜ ์ข…๋ฅ˜๊ฐ€ 4๊ฐ€์ง€์ด๊ธฐ ๋•Œ๋ฌธ์— input data์˜ ํ•œ ์ฐจ์›์€ 4๋กœ shape์ด `(-, -, 4)`๊ฐ€ ๋˜๊ณ , +๋‹จ์–ด์˜ ๊ธธ์ด(**sequence length**)๊ฐ€ 5์ด๋ฏ€๋กœ shape์€ `(-, 5, 4),` +๋งˆ์ง€๋ง‰์œผ๋กœ data์˜ ๊ฐœ์ˆ˜๊ฐ€ 3๊ฐœ(**batch size**)์ด๊ธฐ ๋•Œ๋ฌธ์— shape์ด `(3, 5, 4)`๊ฐ€ ๋œ๋‹ค. + +๋‹ค์Œ์œผ๋กœ RNN layer๋ฅผ ๋งŒ๋“ค ์ฐจ๋ก€์ด๋‹ค. `torch.nn.RNN`์„ ํ†ตํ•ด RNN layer๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ์œผ๋ฉฐ ์ด๋•Œ in/output size๋ฅผ ์ง€์ •ํ•ด์ฃผ์–ด์•ผ ํ•œ๋‹ค. +input size์€ ์•ŒํŒŒ๋ฒณ์˜ ์ข…๋ฅ˜(4)๊ฐ€ ๋˜๊ณ  output size๋Š” ์šฐ๋ฆฌ๊ฐ€ ์›ํ•˜๋Š” class์˜ ๊ฐœ์ˆ˜(2)๊ฐ€ ๋œ๋‹ค. +sequence length์™€ abtch size๋Š” input๊ณผ ๋™์ผํ•˜๋ฉฐ, data๋งŒ ์ž˜ ๋งŒ๋“ค์–ด์„œ ๋„ฃ์–ด์คฌ๋‹ค๋ฉด PyTorch์—์„œ ์•Œ์•„์„œ ์ฒ˜๋ฆฌํ•ด์ฃผ๊ธฐ ๋•Œ๋ฌธ์— ๋”ฐ๋กœ ์ž…๋ ฅํ•  ํ•„์š”๊ฐ€ ์—†๋‹ค. + +```py +rnn = torch.nn.RNN(input_size, hidden_size) + +outputs, _status = rnn(input_data) +print(outputs) +print(outputs.size()) + +'''output +tensor([[[-0.7497, -0.6135], + [-0.5282, -0.2473], + [-0.9136, -0.4269], + [-0.9136, -0.4269], + [-0.9028, 0.1180]], + + [[-0.5753, -0.0070], + [-0.9052, 0.2597], + [-0.9173, -0.1989], + [-0.9173, -0.1989], + [-0.8996, -0.2725]], + + [[-0.9077, -0.3205], + [-0.8944, -0.2902], + [-0.5134, -0.0288], + [-0.5134, -0.0288], + [-0.9127, -0.2222]]], grad_fn=) +torch.Size([3, 5, 2]) +''' +``` + +ํ•œ ๊ฐ€์ง€ ์ด์ƒํ•˜๋‹ค๊ณ  ์ƒ๊ฐํ•  ์ˆ˜ ์žˆ๋Š” ๋ถ€๋ถ„์ด ์žˆ๋‹ค. ๋ถ„๋ช… output size์„ ์ž…๋ ฅํ•œ๋‹ค๊ณ  ํ–ˆ๋Š”๋ฐ `hidden_size`๋ผ๊ณ  ์ •์˜ํ•˜์—ฌ ๋„ฃ์—ˆ๋‹ค. +์ด๋Š” RNN์˜ ๋‚ด๋ถ€ ๊ตฌ์กฐ๋ฅผ ๋ณด๋ฉด ์•Œ ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/lab11-1-1.png) + +๋นจ๊ฐ„ ๋ฐ•์Šค ๋ถ€๋ถ„์„ ๋ณด๋ฉด hidden์œผ๋กœ ๋„˜์–ด๊ฐ€๋Š” ๋ถ€๋ถ„๊ณผ output์œผ๋กœ ๋‚˜๊ฐ€๋Š” data๊ฐ€ ๊ฒฐ๊ตญ ๊ฐ™์€ data์—์„œ ๋‚˜๋ˆ ์ง€๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +๊ทธ๋Ÿฌ๋ฏ€๋กœ hidden size์™€ output size๋Š” ๊ฐ™๋‹ค. ์ด ๋•Œ๋ฌธ์— `hidden_size`๋ผ๊ณ  ์ •์˜ํ•˜์—ฌ ๋„ฃ์€ ๊ฒƒ์ด๋‹ค. + +### Data shape in RNN + +์•ž์„œ ๋‚˜์˜จ shape์„ ์ •๋ฆฌํ•ด ๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +![](/posting_imgs/lab11-1-2.png) + +(batch_size, sequence_length, dimension) ์ˆœ์œผ๋กœ ๋ฐ์ดํ„ฐ๊ฐ€ ๊ตฌ์„ฑ๋˜๋ฉฐ, ์•ž์„œ ์–ธ๊ธ‰ํ•œ ๋Œ€๋กœ in/output size๋งŒ ์ž˜ ๋„ฃ์–ด์ฃผ๋ฉด ๋‚˜๋จธ์ง€๋Š” PyTorch๊ฐ€ ๋ฐ์ดํ„ฐ์— ๋งž๊ฒŒ ์ฒ˜๋ฆฌํ•ด์ค€๋‹ค. 
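한 가지 주의할 점은 `torch.nn.RNN`이 기본적으로 `(sequence, batch, dimension)` 순서의 입력을 기대한다는 것이다. 위처럼 `(batch, sequence, dimension)` 순서로 데이터를 만들었다면 `batch_first=True` 옵션을 명시해 주어야 의도한 대로 해석된다. 앞의 예시를 그대로 사용한 간단한 확인은 다음과 같다.

```py
# (batch, sequence, dimension)으로 만든 데이터라면 batch_first=True를 함께 주는 것이 의도에 맞다
rnn = torch.nn.RNN(input_size, hidden_size, batch_first=True)

outputs, _status = rnn(input_data)
print(outputs.size())   # torch.Size([3, 5, 2]) : (batch, sequence, hidden) 순서로 해석된다
```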
\ No newline at end of file diff --git a/_posts/2022-06-06-dlZeroToAll-PyTorch-11-2.markdown b/_posts/2022-06-06-dlZeroToAll-PyTorch-11-2.markdown new file mode 100644 index 00000000000..4faba9bdd34 --- /dev/null +++ b/_posts/2022-06-06-dlZeroToAll-PyTorch-11-2.markdown @@ -0,0 +1,167 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab11-2: RNN - hihello / charseq" +author: Kwon +date: 2022-06-06T01:00:00 +0900 +categories: [pytorch, study] +tags: [rnn] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab11-2: RNN - hihello / charseq ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## 'hihello' problem + +hihello ๋ฌธ์ œ๋Š” ๊ฐ™์€ ๋ฌธ์ž๋“ค์ด ๋‹ค์Œ ๋ฌธ์ž๊ฐ€ ๋‹ค๋ฅธ ๊ฒฝ์šฐ ์ด๋ฅผ ์˜ˆ์ธกํ•˜๋Š” ๋ฌธ์ œ๋ฅผ ๋งํ•œ๋‹ค. +hihello์—์„œ 'h'์™€ 'l'์€ 2๋ฒˆ์”ฉ ๋“ฑ์žฅํ•˜์ง€๋งŒ ์–ด๋””์— ๋ฌธ์ž๊ฐ€ ์œ„์น˜ํ•˜๋А๋ƒ์— ๋”ฐ๋ผ ๋‹ค์Œ์— ์˜ฌ ๋ฌธ์ž๊ฐ€ ๋‹ฌ๋ผ์ง„๋‹ค. +์ด๋Ÿฐ ๊ฒฝ์šฐ๊ฐ€ RNN์˜ **hidden state**๊ฐ€ ๋น›์„ ๋ฐœํœ˜ํ•˜๋Š” ๊ฒฝ์šฐ์ด๋‹ค. ์•ŒํŒŒ๋ฒณ ๋งŒ์œผ๋กœ ํŒ๋ณ„ํ•  ์ˆ˜ ์—†์ง€๋งŒ ์ˆœ์„œ๋ฅผ ๊ธฐ์–ตํ•˜์—ฌ ๋’ท ๋ฌธ์ž๋ฅผ ์˜ˆ์ธกํ•˜๋Š”๋ฐ ๋„์›€์ด ๋˜๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +### with Code + +[lab11-1](/posts/dlZeroToAll-PyTorch-11-1/)์˜ ์ฝ”๋“œ๋ฅผ ํ™•์žฅํ•˜์—ฌ ์ผ๋ฐ˜ํ™”ํ•œ ๊ฒƒ์ด๋‹ค. +์ „์ฒด์ ์ธ ๊ตฌ์กฐ๋Š” ๊ฑฐ์˜ ๋™์ผํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์ถ”๊ฐ€๋œ ๋ถ€๋ถ„์— ์ดˆ์ ์„ ๋งž์ถฐ ์‚ดํŽด๋ณด๋ ค ํ•œ๋‹ค. + +```py +char_set = ['h', 'i', 'e', 'l', 'o'] + +# hyper parameters +input_size = len(char_set) +hidden_size = len(char_set) +learning_rate = 0.1 + +# data setting +x_data = [[0, 1, 0, 2, 3, 3]] +x_one_hot = [[[1, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [1, 0, 0, 0, 0], + [0, 0, 1, 0, 0], + [0, 0, 0, 1, 0], + [0, 0, 0, 1, 0]]] +y_data = [[1, 0, 2, 3, 3, 4]] + +X = torch.FloatTensor(x_one_hot) +Y = torch.LongTensor(y_data) +``` + +๋งˆ์ฐฌ๊ฐ€์ง€๋กœ one-hot encodingํ•˜์—ฌ Tensor๋กœ ๋ฐ”๊พผ๋‹ค. ๋‹ค๋งŒ ๊ฐ ์•ŒํŒŒ๋ฒณ ๋ณ€์ˆ˜์— ๋ฐฐ์—ด์„ ์ €์žฅํ•˜๋Š” ๋ฐฉ์‹์ด ์•„๋‹ˆ๋ผ `char_set`์— ์ €์žฅ๋œ ์•ŒํŒŒ๋ฒณ์„ `x_data`์˜ ๊ฐ’์„ ์ธ๋ฑ์Šค๋กœ ๋ถˆ๋Ÿฌ์˜ค๋Š” ๋ฐฉ์‹์ด๋‹ค. +one-hot encoding์€ `x_data`์— ์ ์šฉํ•˜์—ฌ ํ•™์Šตํ•œ๋‹ค. + +๋ฐ์ดํ„ฐ๋ฅผ ์ž์„ธํžˆ ๋ณด๋ฉด input(x)์€ ๋งˆ์ง€๋ง‰ ๋ฌธ์ž๊ฐ€ ์—†๊ณ  target(y)์€ ์ฒซ ๋ฌธ์ž๊ฐ€ ์—†๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋Š”๋ฐ, +์ด๊ฑด ๊ฐ ์ฐจ์‹œ์˜ RNN์ด ๋‹ค์Œ ๋ฌธ์ž๋ฅผ ์ถœ๋ ฅํ•˜๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. input์— + +๋‹ค์Œ์€ ๋ชจ๋ธ๊ณผ loss, optimizer๋ฅผ ๋งŒ๋“ค์–ด์ค€๋‹ค. ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ `torch.nn.RNN`๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ •์˜ํ•œ๋‹ค. 
+ +```py +rnn = torch.nn.RNN(input_size, hidden_size, batch_first=True) # batch_first guarantees the order of output = (B, S, F) + +# loss & optimizer setting +criterion = torch.nn.CrossEntropyLoss() +optimizer = optim.Adam(rnn.parameters(), learning_rate) +``` + +```py +# start training +for i in range(100): + optimizer.zero_grad() + outputs, _status = rnn(X) + loss = criterion(outputs.view(-1, input_size), Y.view(-1)) + loss.backward() + optimizer.step() + + result = outputs.data.numpy().argmax(axis=2) + result_str = ''.join([char_set[c] for c in np.squeeze(result)]) + print(i, "loss: ", loss.item(), "prediction: ", result, "true Y: ", y_data, "prediction str: ", result_str) + +'''output +0 loss: 1.7802648544311523 prediction: [[1 1 1 1 1 1]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: iiiiii +1 loss: 1.4931954145431519 prediction: [[1 4 1 1 4 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: ioiioo +2 loss: 1.3337129354476929 prediction: [[1 3 2 3 1 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: ilelio +3 loss: 1.215295433998108 prediction: [[2 3 2 3 3 3]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: elelll +4 loss: 1.1131411790847778 prediction: [[2 3 2 3 3 3]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: elelll +5 loss: 1.0241888761520386 prediction: [[2 3 2 3 3 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: elello +6 loss: 0.9573155045509338 prediction: [[2 3 2 3 3 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: elello +7 loss: 0.9102011322975159 prediction: [[2 0 2 3 3 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: ehello +... +96 loss: 0.5322802066802979 prediction: [[1 3 2 3 3 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: ilello +97 loss: 0.5321123003959656 prediction: [[1 3 2 3 3 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: ilello +98 loss: 0.5319531559944153 prediction: [[1 3 2 3 3 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: ilello +99 loss: 0.5317898392677307 prediction: [[1 3 2 3 3 4]] true Y: [[1, 0, 2, 3, 3, 4]] prediction str: ilello +``` + +ํ•™์Šต์„ ์ง„ํ–‰ํ•˜๋Š” ๊ฒƒ์— ํฌ๊ฒŒ ํŠน์ดํ•œ ์ ์€ ์—†๊ณ  ๊ฒฐ๊ณผ๋ฅผ ๋‚ผ๋•Œ `argmax`๋ฅผ ํ†ตํ•ด one-hot vector๋ฅผ index ๊ฐ’์œผ๋กœ ๋ฐ”๊ฟ”์ค˜์•ผ ํ•œ๋‹ค. ์ด๋ ‡๊ฒŒ ๋ฐ”๊พผ output์€ `''.join([char_set[c] for c in np.squeeze(result)])`๋ฅผ ํ†ตํ•ด ์‹ค์ œ ๋‹จ์–ด๋กœ ๋ฐ”๊ฟ” ์ถœ๋ ฅํ•  ์ˆ˜ ์žˆ๋‹ค. + +๊ฒฐ๊ณผ๋ฅผ ๋ณด๋ฉด ์ฒ˜์Œ์—๋Š” ์ด์ƒํ•œ ๋‹จ์–ด๋“ค์ด ๋‚˜์˜ค๋‹ค๊ฐ€ ๋งˆ์ง€๋ง‰์— ๋‹ค์™€์„œ๋Š” ์ฒซ ๋ฌธ์ž๋ฅผ ์ œ์™ธํ•œ 'ilello'๊ฐ€ ์ œ๋Œ€๋กœ ๋‚˜์˜จ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +*** + +## Charseq + +์ง€๊ธˆ๊นŒ์ง€ ํ–ˆ๋˜ ๊ฒƒ์„ ๋‹ค์‹œ ํ•œ๋ฒˆ ์ผ๋ฐ˜ํ™” ์‹œ์ผœ ์ž„์˜์˜ ๋ฌธ์žฅ๋„ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•œ๋‹ค. + +### Data + +```py +sample = " if you want you" + +# make dictionary +char_set = list(set(sample)) +char_dic = {c: i for i, c in enumerate(char_set)} +print(char_dic) + +'''output + +{'o': 0, 'n': 1, 'a': 2, ' ': 3, 'w': 4, 'i': 5, 'y': 6, 't': 7, 'u': 8, 'f': 9} +''' +``` + +set์„ ํ†ตํ•ด ์ค‘๋ณต๋œ ๋ฌธ์ž๋ฅผ ์ œ๊ฑฐํ•˜๊ณ , ๋ฌธ์ž์™€ ๋ฌธ์ž์˜ index๋ฅผ ๋‹ด๋Š” dictionary๋ฅผ ๋งŒ๋“ค์–ด ์‚ฌ์šฉํ•œ๋‹ค. + +```py +# hyper parameters +dic_size = len(char_dic) +hidden_size = len(char_dic) +learning_rate = 0.1 + +# data setting +sample_idx = [char_dic[c] for c in sample] +x_data = [sample_idx[:-1]] +x_one_hot = [np.eye(dic_size)[x] for x in x_data] +y_data = [sample_idx[1:]] + +# transform as torch tensor variable +X = torch.FloatTensor(x_one_hot) +Y = torch.LongTensor(y_data) +``` + +one-hot encodeng์„ identity matrix(๋‹จ์œ„ํ–‰๋ ฌ)๋ฅผ ํ†ตํ•ด ์ง„ํ–‰ํ•œ๋‹ค. 
`np.eye(size)`๋ฅผ ํ†ตํ•ด ๋งŒ๋“ค ์ˆ˜ ์žˆ๋Š” ๋‹จ์œ„ํ–‰๋ ฌ์€ ์ฃผ๋Œ€๊ฐ์„ (์ขŒ์ƒ์šฐํ•˜)์˜ ์›์†Œ๊ฐ€ ๋ชจ๋‘ 1์ด๊ณ  ๋‚˜๋จธ์ง€๋Š” ๋ชจ๋‘ 0์ธ ์ •์‚ฌ๊ฐ ํ–‰๋ ฌ์ด๋‹ค. +์•ž์„œ ๋ฝ‘์•„๋ƒˆ๋˜ ๋ฌธ์ž๋“ค์˜ index๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋‹จ์œ„ํ–‰๋ ฌ์˜ ํ•œ ์ค„์„ ๋ฝ‘์•„๋‚ด๋ฉด ๊ทธ๊ฒƒ์ด ๊ณง ํ•ด๋‹น ๋ฌธ์ž์˜ one-hot vetor๊ฐ€ ๋˜๊ธฐ ๋•Œ๋ฌธ์— ์†์‰ฝ๊ฒŒ one-hot encodeng์„ ํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ดํ›„ ๋ฐ์ดํ„ฐ์˜ ๊ธธ์ด์— ๋งž์ถฐ ๊ฐ size๋ฅผ ์ •์˜ํ•˜๊ณ  x์—์„œ๋Š” ๋งจ ๋’ค ๋ฌธ์ž, y์—์„œ๋Š” ๋งจ ์•ž ๋ฌธ์ž๋ฅผ ๋นผ์„œ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•œ๋‹ค. +๊ทธ๋ฆฌ๊ณ  ๋งŒ๋“ค์–ด์ง„ ๋ฐ์ดํ„ฐ๋ฅผ Tensor๋กœ ๋ฐ”๊ฟ”์ค€๋‹ค. + +### Train Result + +๋ชจ๋ธ๊ณผ ํ•™์Šต์€ ๋‹ค๋ฅด์ง€ ์•Š์œผ๋ฏ€๋กœ ๊ฒฐ๊ณผ๋งŒ ํ•œ๋ฒˆ ์‚ดํŽด๋ณด์ž + +```py +'''output +0 loss: 2.4069371223449707 prediction: [[7 7 0 7 8 5 8 7 8 7 8 0 7 8 5]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: ttotuiututuotui +1 loss: 2.1236345767974854 prediction: [[1 0 0 1 0 8 0 1 8 8 8 1 1 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: noonouonuuunnou +2 loss: 1.8809428215026855 prediction: [[6 0 3 6 0 8 3 6 0 8 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: yo you yout you +3 loss: 1.71848464012146 prediction: [[6 0 3 6 0 8 3 6 4 5 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: yo you ywit you +4 loss: 1.5743740797042847 prediction: [[6 0 3 6 0 8 3 6 2 5 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: yo you yait you +5 loss: 1.4554158449172974 prediction: [[6 9 3 6 0 8 3 6 8 5 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: yf you yuit you +6 loss: 1.3661972284317017 prediction: [[5 9 3 6 0 8 3 6 2 5 7 3 6 2 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: if you yait yau +7 loss: 1.2864983081817627 prediction: [[5 9 3 6 2 8 3 6 2 1 7 3 6 2 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: if yau yant yau +8 loss: 1.2224119901657104 prediction: [[5 9 3 6 2 8 3 6 2 1 7 3 6 2 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: if yau yant yau +... +46 loss: 0.8302408456802368 prediction: [[5 9 3 6 0 8 3 4 2 1 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: if you want you +47 loss: 0.8290660381317139 prediction: [[5 9 3 6 0 8 3 4 2 1 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: if you want you +48 loss: 0.8275652527809143 prediction: [[5 9 3 6 0 8 3 4 2 1 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: if you want you +49 loss: 0.8264601230621338 prediction: [[5 9 3 6 0 8 3 4 2 1 7 3 6 0 8]] true Y: [[5, 9, 3, 6, 0, 8, 3, 4, 2, 1, 7, 3, 6, 0, 8]] prediction str: if you want you +''' +``` + +๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ํ•™์Šต์˜ ๋ง‰๋ฐ”์ง€๋กœ ๊ฐˆ์ˆ˜๋ก ํ•™์Šตํ–ˆ๋˜ ๋ฌธ์žฅ์ด ์ž˜ ๋‚˜์˜ค๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. 
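+
+์ฐธ๊ณ ๋กœ ์œ„ Train Result๋ฅผ ๋ฝ‘์„ ๋•Œ ์ƒ๋žตํ–ˆ๋˜ ๋ชจ๋ธ/ํ•™์Šต ๋ถ€๋ถ„์„ ๋Œ€๋žต ๋ณต์›ํ•ด ๋ณด๋ฉด ์•„๋ž˜์™€ ๊ฐ™๋‹ค. ๊ฐ•์˜ ์ฝ”๋“œ๋ฅผ ๊ทธ๋Œ€๋กœ ์˜ฎ๊ธด ๊ฒƒ์€ ์•„๋‹ˆ๊ณ , ์œ„์˜ hihello ์˜ˆ์ œ์™€ ๊ฐ™์€ ๊ตฌ์„ฑ(`torch.nn.RNN` + CrossEntropyLoss + Adam, ๋ฐ˜๋ณต ํšŸ์ˆ˜๋Š” ์ถœ๋ ฅ์— ๋งž์ถฐ 50ํšŒ)์ด๋ผ๊ณ  ๊ฐ€์ •ํ•œ ์Šค์ผ€์น˜์ด๋‹ค.
+
+```py
+# charseq ์˜ˆ์ œ์—์„œ ์ƒ๋žต๋œ ๋ชจ๋ธ/ํ•™์Šต ๋ถ€๋ถ„์˜ ์Šค์ผ€์น˜ (hihello ์˜ˆ์ œ์™€ ๊ฐ™์€ ๊ตฌ์„ฑ์ด๋ผ๊ณ  ๊ฐ€์ •)
+rnn = torch.nn.RNN(dic_size, hidden_size, batch_first=True)
+
+criterion = torch.nn.CrossEntropyLoss()
+optimizer = optim.Adam(rnn.parameters(), learning_rate)
+
+for i in range(50):
+    optimizer.zero_grad()
+    outputs, _status = rnn(X)  # X: (1, 15, dic_size) one-hot ์ž…๋ ฅ
+    loss = criterion(outputs.view(-1, dic_size), Y.view(-1))
+    loss.backward()
+    optimizer.step()
+
+    result = outputs.data.numpy().argmax(axis=2)  # one-hot ์ถœ๋ ฅ์„ index๋กœ ๋ณ€ํ™˜
+    result_str = ''.join([char_set[c] for c in np.squeeze(result)])
+    print(i, "loss: ", loss.item(), "prediction str: ", result_str)
+```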
\ No newline at end of file
diff --git a/_posts/2022-06-07-dlZeroToAll-PyTorch-11-3.markdown b/_posts/2022-06-07-dlZeroToAll-PyTorch-11-3.markdown
new file mode 100644
index 00000000000..62b6f389bd7
--- /dev/null
+++ b/_posts/2022-06-07-dlZeroToAll-PyTorch-11-3.markdown
@@ -0,0 +1,201 @@
+---
+title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab11-3: RNN - longseq"
+author: Kwon
+date: 2022-06-07T00:00:00 +0900
+categories: [pytorch, study]
+tags: [rnn]
+math: true
+mermaid: false
+---
+
+[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab11-3: RNN - longseq ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค.
+
+***
+
+## Longseq
+
+์•ž์„œ ์‚ดํŽด๋ณด์•˜๋˜ RNN ์˜ˆ์ œ๋“ค์€ ๋ชจ๋‘ ํ•œ ๋‹จ์–ด๋‚˜ ์งง์€ ๋ฌธ์žฅ์— ๋Œ€ํ•ด RNN์„ ํ•™์Šต์‹œํ‚ค๋Š” ๋‚ด์šฉ๋“ค์ด์—ˆ๋‹ค.
+ํ•˜์ง€๋งŒ ์šฐ๋ฆฌ๊ฐ€ ๋‹ค๋ฃจ๊ณ  ์‹ถ์€ ๋ฐ์ดํ„ฐ๋Š” ๋” ๊ธด ๋ฌธ์žฅ์ด๊ฑฐ๋‚˜ ๋” ๊ธด ๋‚ด์šฉ์ผ ๊ฐ€๋Šฅ์„ฑ์ด ๋†’๋‹ค.
+์ด๋Ÿฐ ์ƒํ™ฉ์—์„œ ๊ทธ ๋ฐ์ดํ„ฐ ์ „์ฒด๋ฅผ ํ†ต์งธ๋กœ ๋„ฃ์–ด RNN์„ ํ•™์Šต์‹œํ‚ค๊ธฐ์—๋Š” ๋“ค์–ด๊ฐ€๋Š” ๋ฐ์ดํ„ฐ์˜ ๊ธธ์ด๊ฐ€ ๋งค๋ฒˆ ๋‹ค๋ฅผ๋ฟ๋”๋Ÿฌ ๊ทธ ํฌ๊ธฐ๊ฐ€ ๋„ˆ๋ฌด ์ปค์„œ ํ•™์Šต์ด ๋ถˆ๊ฐ€๋Šฅํ•  ์ˆ˜๋„ ์žˆ๋‹ค.
+๊ทธ๋ž˜์„œ ์ผ์ •ํ•œ ํฌ๊ธฐ์˜ window๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ RNN์— ์ž˜๋ผ์„œ ๋„ฃ์–ด์ค€๋‹ค.
+
+์•„๋ž˜ ๋ฌธ์žฅ์„ ๋ณด์ž.
+
+```py
+sentence = ("if you want to build a ship, don't drum up people together to ")
+```
+
+์ด ๋ฌธ์žฅ ์ „์ฒด๋ฅผ ํ•œ ๋ฒˆ์— ๋„ฃ๊ธฐ๋ณด๋‹ค๋Š” ํฌ๊ธฐ 10์˜ window๋กœ ์ž˜๋ผ ๋„ฃ์œผ๋ ค๊ณ  ํ•œ๋‹ค.
+์ด๋•Œ window๋ฅผ ์˜ค๋ฅธ์ชฝ์œผ๋กœ ํ•œ ์นธ์”ฉ ๋ฐ€์–ด๊ฐ€๋ฉด์„œ data๋ฅผ ๋งŒ๋“ ๋‹ค. ์œ„ ๋ฌธ์žฅ์„ ์ž๋ฅด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค.
+
+```
+  x_data     ->    y_data
+
+"if you wan" -> "f you want"
+"f you want" -> " you want "
+" you want " -> "you want t"
+"you want t" -> "ou want to"
+"ou want to" -> "u want to "
+```
+
+์ด๋ ‡๊ฒŒ ํ•˜๋ฉด ์ผ์ •ํ•œ ํฌ๊ธฐ๋กœ ๋ฐ์ดํ„ฐ๋ฅผ ์ž˜๋ผ ํ•™์Šต์„ ํ•  ์ˆ˜ ์žˆ๋‹ค. (x_data๋กœ y_data๋ฅผ ํ•™์Šตํ•˜์—ฌ ์˜ˆ์ธก)
+
+***
+
+## with Code
+
+### Imports
+
+```py
+import torch
+import torch.optim as optim
+import numpy as np
+
+torch.manual_seed(0)
+```
+
+### Data
+
+์•ž์„œ ๋ณธ ๋ฌธ์žฅ๊ณผ ๋”๋ถˆ์–ด ๊ทธ ๋’ค์— ์ด์–ด์ง€๋Š” ๋ฌธ์žฅ๊นŒ์ง€ ํ•ฉ์ณ ํ•™์Šต์— ์‚ฌ์šฉํ•ด ๋ณธ๋‹ค.
+
+```py
+sentence = ("if you want to build a ship, don't drum up people together to "
+            "collect wood and don't assign them tasks and work, but rather "
+            "teach them to long for the endless immensity of the sea.")
+```
+
+[lab11-2](/posts/dlZeroToAll-PyTorch-11-2/)์˜ charseq ์˜ˆ์ œ์—์„œ ๋ดค๋˜ ๊ฒƒ๊ณผ ๊ฐ™์€ ๋ฐฉ๋ฒ•์œผ๋กœ one-hot encoding์— ์‚ฌ์šฉํ•  dictionary๋ฅผ ์ƒ์„ฑํ•œ๋‹ค.
+
+```py
+# make dictionary
+char_set = list(set(sentence))
+char_dic = {c: i for i, c in enumerate(char_set)}
+
+# hyper parameters
+dic_size = len(char_dic)
+hidden_size = len(char_dic)
+sequence_length = 10  # Any arbitrary number
+learning_rate = 0.1
+```
+
+์ดํ›„ window๋ฅผ ์ด์šฉํ•˜์—ฌ ์ž๋ฅด๋Š” ๋ฐฉ์‹์œผ๋กœ data๋ฅผ ๋งŒ๋“ค๊ณ  one-hot encoding ํ•œ๋‹ค.
+ +```py +# data setting +x_data = [] +y_data = [] + +# window๋ฅผ ์˜ค๋ฅธ์ชฝ์œผ๋กœ ์›€์ง์ด๋ฉด์„œ ์ž๋ฆ„ +for i in range(0, len(sentence) - sequence_length): + x_str = sentence[i:i + sequence_length] + y_str = sentence[i + 1: i + sequence_length + 1] + print(i, x_str, '->', y_str) + + x_data.append([char_dic[c] for c in x_str]) # x str to index (dict ์‚ฌ์šฉ) + y_data.append([char_dic[c] for c in y_str]) # y str to index + +x_one_hot = [np.eye(dic_size)[x] for x in x_data] + +X = torch.FloatTensor(x_one_hot) +Y = torch.LongTensor(y_data) + +'''output +0 if you wan -> f you want +1 f you want -> you want +2 you want -> you want t +3 you want t -> ou want to +4 ou want to -> u want to +... +166 ty of the -> y of the s +167 y of the s -> of the se +168 of the se -> of the sea +169 of the sea -> f the sea. +''' +``` + +### Model + +๋ฌธ์žฅ์ด ๋” ๊ธธ๊ณ  ๋ณต์žกํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๊ธฐ์กด ํ•œ ์ธต์˜ RNN์œผ๋กœ๋Š” ํ•™์Šต์ด ์ž˜ ์•ˆ ๋  ์ˆ˜ ์žˆ๋‹ค. ๊ทธ๋ž˜์„œ RNN ์ธต์„ ๋” ์Œ“๊ณ  ๋งˆ์ง€๋ง‰์— fully connected layer๋ฅผ ์—ฐ๊ฒฐํ•˜์—ฌ ๋” ๋ณต์žกํ•œ ๋ชจ๋ธ์„ ๋งŒ๋“ค์–ด ์‚ฌ์šฉํ•ด ๋ณผ ๊ฒƒ์ด๋‹ค. + +![](/posting_imgs/lab11-3-1.png) + +```py +# declare RNN + FC +class Net(torch.nn.Module): + def __init__(self, input_dim, hidden_dim, layers): + super(Net, self).__init__() + self.rnn = torch.nn.RNN(input_dim, hidden_dim, num_layers=layers, batch_first=True) + self.fc = torch.nn.Linear(hidden_dim, hidden_dim, bias=True) + + def forward(self, x): + x, _status = self.rnn(x) + x = self.fc(x) + return x + +net = Net(dic_size, hidden_size, 2) +``` + +`torch.nn.RNN`์„ ์ด์šฉํ•˜์—ฌ RNN์„ ์ƒ์„ฑํ•  ๋•Œ `num_layers`๋ฅผ layer ์ˆ˜๋งŒํผ ์„ค์ •ํ•ด ์ฃผ๋Š” ๊ฒƒ์œผ๋กœ ์›ํ•˜๋Š” ์ธต์˜ RNN์„ ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ๋‹ค. +RNN๊ณผ FC๋ฅผ ์ •์˜ํ•˜๊ณ  foward์—์„œ ์—ฐ๊ฒฐ์‹œ์ผœ์ฃผ๋Š” ๋ชจ์Šต์ด๋‹ค. + +### Train + +```py +criterion = torch.nn.CrossEntropyLoss() +optimizer = optim.Adam(net.parameters(), learning_rate) + +# start training +for i in range(100): + optimizer.zero_grad() + outputs = net(X) + loss = criterion(outputs.view(-1, dic_size), Y.view(-1)) + loss.backward() + optimizer.step() + + results = outputs.argmax(dim=2) + predict_str = "" + for j, result in enumerate(results): + # print(i, j, ''.join([char_set[t] for t in result]), loss.item()) + if j == 0: + predict_str += ''.join([char_set[t] for t in result]) + else: + predict_str += char_set[result[-1]] + + print(predict_str) + +'''output +hswmsshsmmmwsisshsmmhwwshhhhwmsssswmhmshhsmshismhhsismshmhmsismisshhswhissmwmmmmhssswwishhhwshmhhsmshsmwwmmhismhmsssmhmshmhmshmhhmshmhhsissimmhsismshmwwmmmhsshhhshmwsmmuismshmwwmm + + t t ttt ttt t ttt t ttt t t t t t t t ttt t t t t t tt t ttt tt t ttt t t tt t t tt t t t t ttt ttt t ttt t tt t tt t tt t t ttt tt t t t t t t t t t ttt + b.o b. 
o o o o o o o o o o o +e ae as a a aa a a a a a a ata aa aa aa a a a aa a aa aa a a aa a a a aa a aa +e tl ee teeeettlteeetl tleeeee eeeee eleet etteeetleeteeeeeeletleeeeeeeeeeeeteeoeestee eletteeeeeletteeeeteeeeeeeetelteeetleseteleteleeeetteteeoeteeee eeeeleeee eeeeeeteeeeeell e +e to ot oo oot tooo ot ouoto o ootoo ouoou ooootto oootu ootootoo oo ooo ooo ouoo ooo ooooo oouoto otoo uoouo ooo uoooo oto oootoo oooo utoo oo ttot ooo oto ooo ooo oooooouo o +e t o ott ttttt u tu ttt ot utt tt t t tttttt ttt t ut ut tt u u t t tt t t t ot tt u t t ut ut o u o o t t + t t t t t t tttt t tt t t ttt et t t tt t t tt t e t tt t t tt t t t t t tt t tt tt t t t t t t t + t tt t tt t t t t t t t tt t t t e t + t o t t o tt o t ot t t o t t +e o t oh o o o o oo oot to o oo o o oooo o o to o h too oo oo oto o oo oo to oo o o ooto o o t o o to o oto oh o o o ooo o ot tooo o +e o t t th o o t oeto too t to t tot o o oo o h o o o h too to th toto tt oeoto oo to o oo ooto oot oo to e to to tto th o o o ohoo toto t to etht +theo lt thth te lo t oete tollthto t tetto e otte taet to o t too too th ttoto it tt teeto ee to e eo eott e t tet t o to e lo to tto tth o o e thee tetoath totetht +thto tt to to to te t to cettoelthto to toe tteto e ae t to te to te th ttoto e tehe to tet to eoto th t tht este e lo to eto tto eto eo e neeeaoh toe thes +thto t t to lt t t to t to t to totto to t lo to t t th ttot tt t to t to oto th d th toe to t to tto to s o n tt tot th s +t to t o to to to ttto to to t to t totto oto t o t to eo to to th ttoto tttt to o to too oto th d t o to to to to tto tod o to t tt to th +thto th t to to t t o to to t to t totto to t to wo tot th thto t tt to to too to t t o to to to uo tth to o to to t to th +thto th t to to t t to to t to totto to m to to wo tot th thto t tth to to to to t th o to to to to uth to to to t to th +thto ethet to to to tht to to dthto totto to em tonto e wo tot th thto d tth to to to to t th o toe to to e to ethi to eto n t o to th +thto eth t to lo 'o tht eto to 'thto tttt kl eth em thnto e to tot th thto t tthe to to to to th n ethe p toe to to e to thi ett e n th t toe the +tuto eto t to tt 'e tht epo to 'ththt tmtt kl eth n th to e e to tot th toto t tthe totho tot to l etot to oe ethe t toi to to e to thi d tt e thta toe the +tuto lao t todto 'e thtoeco tos't tos totto kl to en to to ee d to tod to toto d 't em totoo tot to p tosoto nipetoe p toio to lo t to toio od o to e t tou toemtoe +tuto to t to tp e tos to to 't to tootoo l eto en to to ee o wo tot to toao t tthem tosoo tod to so t so noth np toe to to deso ltoemtod sm to t eaa toem oe +'utos to t to los e tot epo to 't tosl to tn le tos en to lo h n wo tot to toaon t dt er tos tot tor , wod tos enotossa toe to lo sod tosm d sm tn e theoa toe to +... +g you want to build a ship, don't drum up people together to collect wood and don't assign them tasks and work, but rather teach them to long for the sndless immensity of the sea. +f you want to build a ship, don't drum up people together to collect wood and don't assign them tasks and work, but rather teach them to long for the sndless immensity of the sea. +f you want to build a ship, don't drum up people together to collect wood and don't assign them tasks and work, but rather teach them to long for the sndless immensity of the sea. +f you want to build a ship, don't drum up people together to collect wood and don't assign them tasks and work, but rather teach them to long for the sndless immensity of the sea. 
+''' +``` + +loss์™€ optimizer๋ฅผ ์ •์˜ํ•˜๊ณ  ํ•™์Šต์„ ์ง„ํ–‰ํ•œ๋‹ค. +์ฒ˜์Œ์—๋Š” ์ƒ๋‹นํžˆ ์ด์ƒํ•œ ๋ฌธ์žฅ๋“ค์ด ์ถœ๋ ฅ๋œ๋‹ค. ํ•˜์ง€๋งŒ ํ•™์Šต์ด ์ง„ํ–‰๋จ์— ๋”ฐ๋ผ ์›๋ž˜์˜ ๋ฌธ์žฅ์— ๊ฐ€๊น๊ฒŒ ๋ฌธ์žฅ๋“ค์ด ์ถœ๋ ฅ๋˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ, ์ด๋Š” ํ•™์Šต์ด ์ž˜ ๋˜์—ˆ๋‹ค๊ณ  ๋ณผ ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-06-10-dlZeroToAll-PyTorch-11-4.markdown b/_posts/2022-06-10-dlZeroToAll-PyTorch-11-4.markdown new file mode 100644 index 00000000000..6e073aaaeb0 --- /dev/null +++ b/_posts/2022-06-10-dlZeroToAll-PyTorch-11-4.markdown @@ -0,0 +1,193 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab11-4: Timeseries" +author: Kwon +date: 2022-06-10T00:00:00 +0900 +categories: [pytorch, study] +tags: [rnn, timeseries] +math: true +mermaid: false +--- + +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab11-4: Timeseries ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Timeseries + +timeseries(์‹œ๊ฒŒ์—ด) data๋Š” ์ผ์ • ์‹œ๊ฐ„ ๊ฐ„๊ฒฉ์œผ๋กœ ๋ฐฐ์น˜๋œ data๋ฅผ ๋งํ•œ๋‹ค. +๋งค์žฅ์˜ ์‹œ๊ฐ„๋ณ„ ๋งค์ถœ, ์š”์ผ๋ณ„ ์ฃผ์‹ ์‹œ๊ฐ€/์ข…๊ฐ€ ๋“ฑ์ด ์—ฌ๊ธฐ์— ์†ํ•  ์ˆ˜ ์žˆ๋‹ค. +์ด๋“ค๋„ ์ˆœ์„œ๊ฐ€ ๋ฐ์ดํ„ฐ์— ํฌํ•จ๋œ ๊ฒฝ์šฐ์ด๋ฏ€๋กœ RNN์„ ์ด์šฉํ•˜์—ฌ ๊ณผ๊ฑฐ์˜ ๋ฐ์ดํ„ฐ๋ฅผ ๊ฐ€์ง€๊ณ  ์˜ˆ์ธก์„ ํ•˜๋Š” ๊ฒƒ์ด ์ข‹์€ ๋ฐฉ๋ฒ•์ผ ์ˆ˜ ์žˆ๋‹ค. (์ˆœ์„œ๋ฅผ ํฌํ•จํ•œ ๋ฐ์ดํ„ฐ๋ผ๊ณ  RNN์ด ๋งŒ๋Šฅ์ด๋ผ๋Š” ๊ฒƒ์€ ์•„๋‹ˆ๋‹ค.) + +์ด๋ฒˆ ์‹ค์Šต์—์„œ๋Š” ์š”์ผ๋ณ„ ์ฃผ์‹ ์ •๋ณด๋“ค์„ ์ด์šฉํ•˜์—ฌ ํ•™์Šต์„ ์ง„ํ–‰ํ•˜์˜€๋‹ค. + +![](/posting_imgs/lab11-4-1.png) + +์œ„์™€ ๊ฐ™์ด ์ผ๋ณ„ ์‹œ์ž‘๊ฐ€, ๊ณ ๊ฐ€, ์ €๊ฐ€, ๊ฑฐ๋ž˜๋Ÿ‰, ์ข…๊ฐ€๊ฐ€ ํฌํ•จ๋œ ๋ฐ์ดํ„ฐ์ด๋‹ค. + +์ด ๋ฐ์ดํ„ฐ๋ฅผ ๊ทธ๋ƒฅ ํ•™์Šต์‹œํ‚ฌ์ˆ˜๋„ ์žˆ์ง€๋งŒ ๊ฐ ๋ฐ์ดํ„ฐ๋“ค์˜ scale์„ ๋งž์ถ”๊ณ  ํ•˜๋Š” ๊ฒƒ์ด ๋” ์ข‹๋‹ค. +๊ฑฐ๋ž˜๋Ÿ‰์„ ์ œ์™ธํ•œ ๊ฐ€๊ฒฉ ์ •๋ณด๋“ค์€ 800 ์ •๋„์˜ ๊ฐ’์— ์žˆ์ง€๋งŒ ๊ฑฐ๋ž˜๋Ÿ‰์€ 100๋งŒ ๋‹จ์œ„์ด๋‹ค. +๋งŒ์•ฝ ์ด๋Œ€๋กœ ๋ฐ”๋กœ ํ•™์Šตํ•œ๋‹ค๋ฉด ๊ฑฐ๋ž˜๋Ÿ‰์— ์น˜์šฐ์ณ์„œ ํ•™์Šต์„ ํ•˜๊ฑฐ๋‚˜ scale์„ ๋งž์ถ”๊ธฐ ์œ„ํ•œ ํ•™์Šต์„ model์ด ์ถ”๊ฐ€์ ์œผ๋กœ ํ•ด์•ผ ํ•˜๋ฏ€๋กœ ํ•„์š”์—†๋Š” ๋ถ€๋‹ด์ด ๋ฐœ์ƒํ•  ์ˆ˜ ์žˆ๋‹ค. +๊ทธ๋ž˜์„œ ๋’ค์— ๋‚˜์˜ฌ ์ฝ”๋“œ์—์„œ๋Š” scaling์„ ํ•˜๊ณ  ํ•™์Šต์„ ์ง„ํ–‰ํ•  ๊ฒƒ์ด๋‹ค. + +*** + +## with Code + +### Imports + +```py +import torch +import torch.optim as optim +import numpy as np +import matplotlib.pyplot as plt + +torch.manual_seed(0) + +# hyper parameters +seq_length = 7 +data_dim = 5 +hidden_dim = 10 +output_dim = 1 +learning_rate = 0.01 +iterations = 500 +``` + +### Data + +๋ฐ์ดํ„ฐ๋ฅผ ๋ถˆ๋Ÿฌ์˜ค๊ณ  70%์˜ ๋ฐ์ดํ„ฐ๋ฅผ train data๋กœ ๋งŒ๋“ค์–ด์ค€๋‹ค. + +```py +# load data +xy = np.loadtxt("data-02-stock_daily.csv", delimiter=",") +xy = xy[::-1] # reverse order + +# split train-test set +train_size = int(len(xy) * 0.7) +train_set = xy[0:train_size] +test_set = xy[train_size - seq_length:] +``` + +์•ž์„œ ์–ธ๊ธ‰ํ•œ๋Œ€๋กœ scaling์„ ํ•˜๊ณ  ํ•™์Šตํ•˜๊ธฐ ์ข‹์€ ํ˜•ํƒœ๋กœ data๋ฅผ ๊ฐ€๊ณตํ•ด์•ผ ํ•œ๋‹ค. + +```py +def minmax_scaler(data): + numerator = data - np.min(data, 0) + denominator = np.max(data, 0) - np.min(data, 0) + return numerator / (denominator + 1e-7) + +train_set = minmax_scaler(train_set) +test_set = minmax_scaler(test_set) +``` + +์ด๋ฒˆ ์—์ œ์—์„œ๋Š” min-max scaling์„ ์ ์šฉํ•  ๊ฒƒ์ด๋‹ค. min-max scaling์€ ์•„๋ž˜ ์‹์„ ํ†ตํ•ด ์ง„ํ–‰๋˜๋Š” scaling์œผ๋กœ ์ตœ์†Œ, ์ตœ๋Œ€๊ฐ’์„ ์‚ฌ์šฉํ•˜์—ฌ 0๊ณผ 1์‚ฌ์ด์˜ ๊ฐ’์œผ๋กœ ๋ฐ”๊พธ์–ด ์ค€๋‹ค. 
+ +\\[x_{scaled}=\frac{x-x_{min}}{x_{max}-x_{min}}\\] + +์ด๋ฒˆ์—๋„ ๋ฐ์ดํ„ฐ์˜ ๊ธธ์ด๊ฐ€ ๊ธธ๊ธฐ ๋•Œ๋ฌธ์— RNN model์— ๋„ฃ์–ด์ค„ ๋งŒํผ ์ž˜๋ผ์„œ ๋ฐ์ดํ„ฐ๋ฅผ ๋งŒ๋“ค์–ด์ค€๋‹ค. + +```py +def build_dataset(time_series, seq_length): + dataX = [] + dataY = [] + for i in range(0, len(time_series) - seq_length): + _x = time_series[i:i + seq_length, :] + _y = time_series[i + seq_length, [-1]] # Next close price + print(_x, "->", _y) + dataX.append(_x) + dataY.append(_y) + return np.array(dataX), np.array(dataY) + +trainX, trainY = build_dataset(train_set, seq_length) +testX, testY = build_dataset(test_set, seq_length) + +'''output +[[2.53065030e-01 2.45070970e-01 2.33983036e-01 4.66075110e-04 + 2.32039560e-01] + [2.29604366e-01 2.39728936e-01 2.54567513e-01 2.98467330e-03 + 2.37426028e-01] + [2.49235510e-01 2.41668371e-01 2.48338489e-01 2.59926504e-04 + 2.26793794e-01] + [2.21013495e-01 2.46602231e-01 2.54710584e-01 0.00000000e+00 + 2.62668239e-01] + [3.63433786e-01 3.70389871e-01 2.67168847e-01 1.24764722e-02 + 2.62105010e-01] + [2.59447633e-01 3.10673724e-01 2.74113889e-01 4.56323384e-01 + 2.71751265e-01] + [2.76008150e-01 2.78314566e-01 1.98470380e-01 5.70171193e-01 + 1.78104644e-01]] -> [0.16053716] +... + [0.88723699 0.88829938 0.92518158 0.08714288 0.90908564] + [0.88939504 0.88829938 0.94014512 0.13380794 0.90030461] + [0.89281215 0.89655181 0.94323484 0.12965206 0.93124657] + [0.91133638 0.91818448 0.95944078 0.1885611 0.95460261]] -> [0.97604677] +''' +``` + +7์ผ๊ฐ„์˜ ์ฃผ์‹ ๋ฐ์ดํ„ฐ(x)๋“ค์„ ํ†ตํ•ด ๊ทธ ๋‹ค์Œ๋‚ ์˜ ์ข…๊ฐ€(y)๋ฅผ ์˜ˆ์ธกํ•˜๋„๋ก ๋ฐ์ดํ„ฐ๋ฅผ ๋งŒ๋“ ๋‹ค. window์˜ ํฌ๊ธฐ๋ฅผ 7๋กœ ํ•ด์„œ ์ž˜๋ž๋‹ค๊ณ  ์ƒ๊ฐํ•˜๋ฉด ๋ ๊ฒƒ ๊ฐ™๋‹ค. + +### Model + +์ด๋ฒˆ์—๋Š” RNN์˜ ํ•œ ์ข…๋ฅ˜์ธ LSTM์„ ์‚ฌ์šฉํ•˜๋ฉฐ ๋งˆ์ง€๋ง‰์— fully connected layer๋ฅผ ์—ฐ๊ฒฐํ•˜์—ฌ ์ถœ๋ ฅ์„ ๋‚ธ๋‹ค. + +![](/posting_imgs/lab11-4-2.png) + +์ด๋Ÿฐ ์‹์œผ๋กœ ๋งˆ์ง€๋ง‰์— fc ์ธต์„ ์ถ”๊ฐ€ํ•˜๋Š” ์ด์œ ๊ฐ€ ๋ช‡๊ฐ€์ง€ ์žˆ๋‹ค. + +๋จผ์ €, data๋ฅผ ์ „๋‹ฌํ•˜๋Š” ๋ถ€๋ถ„๊ณผ label์„ ์ตœ์ข…์ ์œผ๋กœ ๋งž์ถ”๋Š” ๋ถ€๋ถ„์„ ๋ถ„๋ฆฌํ•˜์—ฌ network์— ๊ฐ€ํ•ด์ง€๋Š” ๋ถ€๋‹ด์„ ๋ถ„์‚ฐํ•  ์ˆ˜ ์žˆ๋‹ค. +๊ทธ๋ฆฌ๊ณ  RNN ์ธต์„ ๋ฐ”๋กœ ์ถœ๋ ฅ์— ์—ฐ๊ฒฐํ•  ๊ฒฝ์šฐ ๊ณผ๊ฑฐ์˜ ์ •๋ณด๋ฅผ ์ „๋‹ฌํ•˜๋Š” hidden state๋„ ์ตœ์ข… ์ถœ๋ ฅ์˜ ์ฐจ์›๊ณผ ๋งž์ถฐ์ฃผ์–ด์•ผ ํ•œ๋‹ค. +์ด๋ฒˆ ๊ฒฝ์šฐ๋ฅผ ๋ณด๋ฉด ์ตœ์ข…์ ์œผ๋กœ 1์ฐจ์›์˜ ์ถœ๋ ฅ์„ ๋‚ด์–ด์•ผ ํ•˜๋Š”๋ฐ ์ด๋Š” ์ •๋ณด๋ฅผ ์ „๋‹ฌํ•˜๋Š” hidden state๋„ ์ฐจ์›์ด 1์ด์–ด์•ผ ํ•œ๋‹ค๋Š” ๋œป์ด๋‹ค. +์ด๋Ÿฐ ์ƒํ™ฉ์—์„œ๋Š” model์ด ํ•™์Šต ๋ฟ๋งŒ ์•„๋‹ˆ๋ผ ์ „๋ณด ์ „๋‹ฌ์„ ์œ„ํ•œ ์••์ถ•๋„ ํ•ด์•ผํ•˜๋Š” ๋ถ€๋‹ด์„ ๊ฐ€์ง€๊ฒŒ ๋˜์–ด ํ•™์Šต์— ์•…์˜ํ–ฅ์„ ๋ผ์น  ์ˆ˜ ์žˆ๋‹ค. +๊ทธ๋ž˜์„œ ์ผ๋ฐ˜์ ์œผ๋กœ hidden state์˜ ์ฐจ์›์€ ์ถฉ๋ถ„ํžˆ ๋ณด์žฅํ•ด์ฃผ๊ณ  ๋งˆ์ง€๋ง‰์— fc layer๋ฅผ ์—ฐ๊ฒฐํ•˜์—ฌ ์ถœ๋ ฅ์„ ์™„์„ฑํ•˜๋Š” ๋ฐฉ์‹์„ ์‚ฌ์šฉํ•œ๋‹ค. + +```py +class Net(torch.nn.Module): + def __init__(self, input_dim, hidden_dim, output_dim, layers): + super(Net, self).__init__() + self.rnn = torch.nn.LSTM(input_dim, hidden_dim, num_layers=layers, batch_first=True) + self.fc = torch.nn.Linear(hidden_dim, output_dim, bias=True) + + def forward(self, x): + x, _status = self.rnn(x) + x = self.fc(x[:, -1]) + return x + + +net = Net(data_dim, hidden_dim, output_dim, 1) +``` + +### Train + +loss์™€ optimizer๋ฅผ ์ •์˜ํ•˜๊ณ  ํ•™์Šตํ•œ๋‹ค. 
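+
+ํ•œ ๊ฐ€์ง€ ์ฐธ๊ณ ํ•  ์ ์€, ์•„๋ž˜ ํ•™์Šต ์ฝ”๋“œ์—์„œ ์“ฐ์ด๋Š” `trainX_tensor`, `trainY_tensor`(๊ทธ๋ฆฌ๊ณ  ๋’ค์˜ `testX_tensor`)์˜ ์ •์˜๊ฐ€ ๋ณธ๋ฌธ์—๋Š” ์ƒ๋žต๋˜์–ด ์žˆ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. `build_dataset`์ด ๋ฐ˜ํ™˜ํ•œ numpy ๋ฐฐ์—ด์„ ๋Œ€๋žต ๋‹ค์Œ๊ณผ ๊ฐ™์ด Tensor๋กœ ๋ฐ”๊ฟจ๋‹ค๊ณ  ๊ฐ€์ •ํ•˜๋ฉด ๋œ๋‹ค.
+
+```py
+# build_dataset์ด ๋ฐ˜ํ™˜ํ•œ numpy ๋ฐฐ์—ด์„ Tensor๋กœ ๋ณ€ํ™˜ (๋ณธ๋ฌธ์— ์ƒ๋žต๋œ ๋ถ€๋ถ„์„ ๊ฐ€์ •ํ•œ ์Šค์ผ€์น˜)
+trainX_tensor = torch.FloatTensor(trainX)
+trainY_tensor = torch.FloatTensor(trainY)
+
+testX_tensor = torch.FloatTensor(testX)
+```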
+ +```py +criterion = torch.nn.MSELoss() +optimizer = optim.Adam(net.parameters(), lr=learning_rate) + +for i in range(iterations): + + optimizer.zero_grad() + outputs = net(trainX_tensor) + loss = criterion(outputs, trainY_tensor) + loss.backward() + optimizer.step() + print(i, loss.item()) +``` + +์˜ˆ์ธกํ•œ ๊ฒƒ๋“ค์„ ํฌ๋ž˜ํ”„๋กœ ๊ทธ๋ ค๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค + +```py +plt.plot(testY) +plt.plot(net(testX_tensor).data.numpy()) +plt.legend(['original', 'prediction']) +plt.show() +``` + +![](/posting_imgs/lab11-4-3.png) + +์˜ˆ์ธก์ด ์•„์ฃผ ์ž˜ ๋œ๊ฒƒ ๊ฐ™์•„ ๋ณด์ด์ง€๋งŒ ์‹ค์ œ๋กœ๋Š” ์•„๋‹ˆ๋‹ค. + +![](/posting_imgs/lab11-4-4.png) + +์œ„ ๊ทธ๋ฆผ์€ ๊ทธ๋ž˜ํ”„ ์ค‘ ์ผ๋ถ€๋ฅผ ๊ฐ€์ ธ์˜จ ๊ฒƒ์ธ๋ฐ, ์ž˜ ๋ณด๋ฉด ์˜ˆ์ธก์ด ์˜ค๋ฅธ์ชฝ์œผ๋กœ ํ•œ ์นธ์”ฉ ๋ฐ€๋ฆฐ ๊ฒƒ์ฒ˜๋Ÿผ ๋ณด์ธ๋‹ค. +์ด๋Š” ๋…ธ์ด์ฆˆ๊ฐ€ ์‹ฌํ•œ ๊ธˆ์œต ์‹œ๊ณ„์—ด ๋ฐ์ดํ„ฐ์—์„œ lstm model์˜ ๊ณ ์งˆ์ ์ธ ๋ฌธ์ œ๋กœ ์ง์ „ ๊ฐ’์„ ์˜ˆ์ธก ๊ฐ’์œผ๋กœ ์ถœ๋ ฅํ•˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ์žฆ๋‹ค๊ณ  ํ•œ๋‹ค. (์ด์ „์˜ ๊ฐ’์„ ์˜ˆ์ธก ๊ฐ’์œผ๋กœ ๋‚ด๋Š” ๊ฒƒ์ด ๊ฐ€์žฅ ์ด๋“์ด๋ผ๊ณ  ํŒ๋‹จํ–ˆ๊ธฐ ๋•Œ๋ฌธ) +์‹ค์ œ๋กœ ์‚ฌ์šฉํ•  ๋•Œ์—๋Š” ์ด๋Ÿฐ ๊ฒฝ์šฐ์— ์ •๋ง๋กœ ์˜ˆ์ธก์„ ์ž˜ ํ•œ ๊ฒƒ์ธ์ง€ ์•„๋‹ˆ๋ฉด ์œ„์™€ ๊ฐ™์ด ๋ฐ€๋ ค์„œ ์ž˜ ๋˜์–ด ๋ณด์ด๋Š” ๊ฑด์ง€ ํ™•์ธํ•ด ๋ณผ ํ•„์š”๊ฐ€ ์žˆ์„ ๊ฒƒ ๊ฐ™๋‹ค. \ No newline at end of file diff --git a/_posts/2022-06-10-dlZeroToAll-PyTorch-11-5.markdown b/_posts/2022-06-10-dlZeroToAll-PyTorch-11-5.markdown new file mode 100644 index 00000000000..65bc1fc943d --- /dev/null +++ b/_posts/2022-06-10-dlZeroToAll-PyTorch-11-5.markdown @@ -0,0 +1,354 @@ +--- +title: "๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹ 2 - Lab11-5: Seq2Seq" +author: Kwon +date: 2022-06-10T01:00:00 +0900 +categories: [pytorch, study] +tags: [rnn] +math: true +mermaid: false +--- +[๋ชจ๋‘๋ฅผ ์œ„ํ•œ ๋”ฅ๋Ÿฌ๋‹](https://deeplearningzerotoall.github.io/season2/lec_pytorch.html) Lab11-5: Seq2Seq ๊ฐ•์˜๋ฅผ ๋ณธ ํ›„ ๊ณต๋ถ€๋ฅผ ๋ชฉ์ ์œผ๋กœ ์ž‘์„ฑํ•œ ๊ฒŒ์‹œ๋ฌผ์ž…๋‹ˆ๋‹ค. + +*** + +## Seq2Seq Model + +Seq2Seq model์€ ์•„๋ž˜์™€ ๊ฐ™์€ ๊ตฌ์กฐ๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ๋‹ค. + +![](/posting_imgs/lab11-5-1.png) + +์ผ์ข…์˜ Encoder-Decoder ๊ตฌ์กฐ๋ผ๊ณ ๋„ ํ•  ์ˆ˜ ์žˆ๋Š”๋ฐ ๋ชจ๋“  ์ž…๋ ฅ์„ ๋‹ค ๋ฐ›์€ ํ›„์— ์ถœ๋ ฅ์„ ์ƒ์„ฑํ•˜๋Š” ๊ตฌ์กฐ์ด๋‹ค. + +์™ผ์ชฝ(ํŒŒ๋ž€์ƒ‰)์—์„œ๋Š” ์ž…๋ ฅ ๋ฐ›์€ ์ •๋ณด๋“ค์„ ์ˆœ์ฐจ์ ์œผ๋กœ ํ•™์Šตํ•˜์—ฌ ์ •๋ณด๋ฅผ vector๋กœ ์••์ถ•ํ•˜๋Š” ์—ญํ• ์„ ํ•˜๊ณ , +์˜ค๋ฅธ์ชฝ(์ฃผํ™ฉ์ƒ‰)์—์„œ๋Š” ์••์ถ•ํ•œ ์ •๋ณด๋ฅผ ์ „๋‹ฌ๋ฐ›์•„ start flag(Start Of Sentence, SOS)์™€ ํ•จ๊ป˜ ๋‹ค์Œ์— ๋“ฑ์žฅํ•  ๋ฐ์ดํ„ฐ๋ฅผ ์˜ˆ์ธกํ•˜๋ฉด์„œ ์ˆœ์ฐจ์ ์œผ๋กœ output์„ ๋‚ด๊ณ  ๋งˆ์ง€๋ง‰์—๋Š” end flag(Etart Of Sentence, EOS)๋ฅผ ์ถœ๋ ฅํ•˜์—ฌ ๋ฐ์ดํ„ฐ์˜ ๋์ด๋ผ๋Š” ๊ฒƒ์„ ์•Œ๋ ค์ค€๋‹ค. + +๊ฐ„๋‹จํ•œ ๋ฌธ์žฅ์— ๋Œ€ํ•œ ๋Œ€๋‹ต์„ ์—๋กœ ๋“ค๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. ๋‚ด๋ถ€ layer๋Š” LSTM๋‚˜ GRU๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +![](/posting_imgs/lab11-5-2.png) + +์ด๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ์—๋กœ๋Š” chatbot์ด ์žˆ์„ ์ˆ˜ ์žˆ๋Š”๋ฐ chatbot์€ ์‚ฌ์šฉ์ž์˜ ์ž…๋ ฅ(๋ฌธ์žฅ)์„ ๋‹ค ๋“ฃ๊ธฐ ์ „์— ๋‹ต๋ณ€์„ ๋งŒ๋“ค ๊ฒฝ์šฐ ์‹ค์ œ ๋ฌธ์žฅ๊ณผ ์ƒ๊ด€์—†๋Š” ๋‹ต๋ณ€์„ ์ƒ์„ฑํ•  ์ˆ˜๋„ ์žˆ๋‹ค. +์ด๋Ÿฐ ๊ฒฝ์šฐ ์ฒ˜๋Ÿผ input sequence์˜ ์ „์ฒด๋ฅผ ๋‹ค ํ™•์ธํ•˜๊ณ  ์ถœ๋ ฅ์ด ์žˆ์–ด์•ผ ํ•˜๋Š” ๊ฒฝ์šฐ์— Seq2Seq model์„ ์‚ฌ์šฉํ•˜๊ฒŒ ๋œ๋‹ค. + +*** + +## with Code + +๊ฐ„๋‹จํ•œ ๋ฒˆ์—ญ์„ ํ•  ์ˆ˜ ์žˆ๋Š” model์„ ํ•™์Šต์‹œํ‚ค๋Š” ์‹ค์Šต์ด๋‹ค. + +### Imports + +```py +import random +import torch +import torch.nn as nn +from torch import optim + +torch.manual_seed(0) +device = torch.device("cuda" if torch.cuda.is_available() else "cpu") +``` + +### Data + +์•„๋ž˜ data๋ฅผ ์ฒ˜๋ฆฌํ•˜์—ฌ ์‚ฌ์šฉํ•  ๊ฒƒ์ด๋‹ค. 
data๋Š” ๊ฐ™์€ ๋œป์˜ ์˜์–ด์™€ ํ•œ๊ตญ์–ด๋กœ ๊ตฌ์„ฑ๋˜์–ด ์žˆ๊ณ  ๊ฐ ์˜์–ด์™€ ํ•˜๋ˆ†์–ด๋Š” tab์œผ๋กœ ๊ตฌ๋ถ„๋˜์–ด ์žˆ๋‹ค. + +```py +raw = ["I feel hungry. ๋‚˜๋Š” ๋ฐฐ๊ฐ€ ๊ณ ํ”„๋‹ค.", + "Pytorch is very easy. ํŒŒ์ดํ† ์น˜๋Š” ๋งค์šฐ ์‰ฝ๋‹ค.", + "Pytorch is a framework for deep learning. ํŒŒ์ดํ† ์น˜๋Š” ๋”ฅ๋Ÿฌ๋‹์„ ์œ„ํ•œ ํ”„๋ ˆ์ž„์›Œํฌ์ด๋‹ค.", + "Pytorch is very clear to use. ํŒŒ์ดํ† ์น˜๋Š” ์‚ฌ์šฉํ•˜๊ธฐ ๋งค์šฐ ์ง๊ด€์ ์ด๋‹ค."] +``` + +data๋ฅผ ์ „์ฒ˜๋ฆฌํ•˜๋Š” ํ•จ์ˆ˜๋ฅผ ๋งŒ๋“ค์–ด ์‚ฌ์šฉํ•œ๋‹ค. + +```py +# fix token for "start of sentence" and "end of sentence" +SOS_token = 0 +EOS_token = 1 + +class Vocab: + def __init__(self): + self.vocab2index = {"": SOS_token, "": EOS_token} + self.index2vocab = {SOS_token: "", EOS_token: ""} + self.vocab_count = {} + self.n_vocab = len(self.vocab2index) + + def add_vocab(self, sentence): + for word in sentence.split(" "): + if word not in self.vocab2index: + self.vocab2index[word] = self.n_vocab + self.vocab_count[word] = 1 + self.index2vocab[self.n_vocab] = word + self.n_vocab += 1 + else: + self.vocab_count[word] += 1 + +def filter_pair(pair, source_max_length, target_max_length): + return len(pair[0].split(" ")) < source_max_length and len(pair[1].split(" ")) < target_max_length + +def preprocess(corpus, source_max_length, target_max_length): + print("reading corpus...") + pairs = [] + for line in corpus: + pairs.append([s for s in line.strip().lower().split("\t")]) + print("Read {} sentence pairs".format(len(pairs))) + + pairs = [pair for pair in pairs if filter_pair(pair, source_max_length, target_max_length)] + print("Trimmed to {} sentence pairs".format(len(pairs))) + + source_vocab = Vocab() + target_vocab = Vocab() + + print("Counting words...") + for pair in pairs: + source_vocab.add_vocab(pair[0]) + target_vocab.add_vocab(pair[1]) + print("source vocab size =", source_vocab.n_vocab) + print("target vocab size =", target_vocab.n_vocab) + + return pairs, source_vocab, target_vocab +``` + +๋จผ์ € `\t`(tab)์œผ๋กœ ๋‚˜๋ˆ ์„œ pairs์— ๋„ฃ์–ด์ฃผ๊ณ  `filter_pair`๋กœ ๊ฐ ๋ฌธ์žฅ์˜ ๋‹จ์–ด ๊ฐœ์ˆ˜๊ฐ€ `source_max_length`์™€ `target_max_length`๋ฅผ ๋„˜์ง€ ์•Š๋Š” ๊ฒฝ์šฐ๋งŒ ํ•„ํ„ฐ๋งํ•œ๋‹ค. +์ •์ œ๋œ ๋ฐ์ดํ„ฐ๋“ค์„ ๋”ฐ๋กœ ์ •์˜ํ•œ `Vocab` instance๋ฅผ ํ†ตํ•ด ๋‹จ์–ด์˜ ์ข…๋ฅ˜์™€ ๊ทธ ๊ฐœ์ˆ˜๋กœ ์ด๋ฃจ์–ด์ง„ dictionary data๋กœ ๋งŒ๋“ค์–ด์ค€๋‹ค. + +```py +SOURCE_MAX_LENGTH = 10 +TARGET_MAX_LENGTH = 12 + +load_pairs, load_source_vocab, load_target_vocab = preprocess(raw, SOURCE_MAX_LENGTH, TARGET_MAX_LENGTH) +print(random.choice(load_pairs)) + +'''output +reading corpus... +Read 4 sentence pairs +Trimmed to 4 sentence pairs +Counting words... +source vocab size = 17 +target vocab size = 13 +['pytorch is very clear to use.', 'ํŒŒ์ดํ† ์น˜๋Š” ์‚ฌ์šฉํ•˜๊ธฐ ๋งค์šฐ ์ง๊ด€์ ์ด๋‹ค.'] +''' +``` + +4๊ฐœ์˜ ๋ฌธ์žฅ์ด ๋ชจ๋‘ ์ž˜ ๋ณ€ํ™˜์ด ๋˜์—ˆ๊ณ  +๋ฌด์ž‘์œ„๋กœ ํ•˜๋‚˜๋ฅผ ๊ณจ๋ผ ์ถœ๋ ฅํ•ด๋ณด๋ฉด ์œ„์™€ ๊ฐ™์ด ์Œ์ด ์ž˜ ๋‚˜์˜ค๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +### Model + +Model์€ ์•ž์—์„œ ์–ธ๊ธ‰ํ•œ ๊ฒƒ๊ณผ ๊ฐ™์ด encoder์™€ decoder๋กœ ์ด๋ฃจ์–ด์ ธ ์žˆ๋‹ค. 
+ +```py +class Encoder(nn.Module): + def __init__(self, input_size, hidden_size): + super(Encoder, self).__init__() + self.hidden_size = hidden_size + self.embedding = nn.Embedding(input_size, hidden_size) + self.gru = nn.GRU(hidden_size, hidden_size) + + def forward(self, x, hidden): + x = self.embedding(x).view(1, 1, -1) + x, hidden = self.gru(x, hidden) + return x, hidden + +class Decoder(nn.Module): + def __init__(self, hidden_size, output_size): + super(Decoder, self).__init__() + self.hidden_size = hidden_size + self.embedding = nn.Embedding(output_size, hidden_size) + self.gru = nn.GRU(hidden_size, hidden_size) + self.out = nn.Linear(hidden_size, output_size) + self.softmax = nn.LogSoftmax(dim=1) + + def forward(self, x, hidden): + x = self.embedding(x).view(1, 1, -1) + x, hidden = self.gru(x, hidden) + x = self.softmax(self.out(x[0])) + return x, hidden +``` + +์ด๋ฒˆ model์˜ ๋‚ด๋ถ€๋Š” GRU๋กœ ๊ตฌ์„ฑํ•œ๋‹ค. ๋‹ค๋ฅธ ๊ฒƒ๋“ค์€ ํฌ๊ฒŒ ๋‹ค๋ฅธ ์ ์€ ์—†์ง€๋งŒ ์กฐ๊ธˆ ๋‹ค๋ฅธ ๊ฒƒ์€ embedding ๋ถ€๋ถ„์ด๋‹ค. +source์˜ oen-hot vector๋Š” ๋‹จ์–ด ๊ฐœ์ˆ˜ ๋งŒํผ input size๊ฐ€ ์ปค์ง€๊ฒŒ ๋˜๋Š”๋ฐ ์ด๋•Œ์˜ ์ฐจ์›์€ ๊ทธ๋ƒฅ ํ•™์Šตํ•˜๊ธฐ์— ๋„ˆ๋ฌด ํด ์ˆ˜ ์žˆ๋‹ค. +๊ทธ๋ž˜์„œ `nn.Embedding`์„ ํ†ตํ•ด ์ฐจ์›์„ ์ค„์—ฌ ๋ฐ€์ง‘๋˜๊ฒŒ ๋ฐ”๊ฟ” ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. + +Decoder์—์„œ๋Š” model์„ ํ†ตํ•ด ๋งŒ๋“  ๊ฒƒ๋“ค์„ softmax๋ฅผ ํ†ตํ•ด ๋‹จ์–ด๋“ค์ด ๋‚˜์˜ฌ ํ™•๋ฅ ๋กœ ๋‚ด๋ณด๋‚ธ๋‹ค. + +### Train + +ํ•™์Šตํ•˜๋Š” ์ฝ”๋“œ๋Š” ์กฐ๊ธˆ ๊ธธ๊ธฐ ๋•Œ๋ฌธ์— ์ž˜๋ผ์„œ ๋ณด์ž. + +๋จผ์ € `trian` ์•ˆ์—์„œ ์‚ฌ์šฉํ•  `tensorize` ํ•จ์ˆ˜๋‹ค. + +```py +# convert sentence to the index tensor with vocab +def tensorize(vocab, sentence): + indexes = [vocab.vocab2index[word] for word in sentence.split(" ")] + indexes.append(vocab.vocab2index[""]) + return torch.Tensor(indexes).long().to(device).view(-1, 1) +``` + +์ „์ฒ˜๋ฆฌ ํ•  ๋•Œ `Vocab`์— ์ €์žฅํ–ˆ๋˜ ๋ฌธ์žฅ๋“ค์„ ํ•™์Šตํ•  ์ˆ˜ ์žˆ๋„๋ก Tensor๋กœ ๋ฐ”๊ฟ”์ฃผ๋Š” ํ•จ์ˆ˜์ด๋‹ค. + +๋‹ค์Œ์€ `train`์˜ ์•ž๋ถ€๋ถ„์ด๋‹ค. ๋ฌด์ž‘์œ„๋กœ `n_iter`๋งŒํผ ๋ฝ‘์•„์„œ batch data๋ฅผ ๋งŒ๋“ค์–ด์ฃผ๊ณ , ์•ž์„œ ์ •์˜ํ•œ `tensorize`๋ฅผ ์•„์šฉํ•˜์—ฌ data๋“ค์„ ๋ชจ๋‘ Tensor๋กœ ๋ฐ”๊ฟ”์ฃผ๊ณ  encoder/decoder์˜ optimizer์™€ loss๋ฅผ ์ •์˜ํ•ด์ค€๋‹ค. + +```py +# training seq2seq +def train(pairs, source_vocab, target_vocab, encoder, decoder, n_iter, print_every=1000, learning_rate=0.01): + loss_total = 0 + + encoder_optimizer = optim.SGD(encoder.parameters(), lr=learning_rate) + decoder_optimizer = optim.SGD(decoder.parameters(), lr=learning_rate) + + training_batch = [random.choice(pairs) for _ in range(n_iter)] + training_source = [tensorize(source_vocab, pair[0]) for pair in training_batch] + training_target = [tensorize(target_vocab, pair[1]) for pair in training_batch] + + criterion = nn.NLLLoss() + + ... +``` + +์œ„์—์„œ ์ฒ˜๋ฆฌํ•˜๊ณ  ์ •์˜ํ•œ ๊ฒƒ๋“ค์„ ๊ธฐ๋ฐ˜์œผ๋กœ encoder๋ฅผ ํ•™์Šตํ•˜๋Š” ๋ถ€๋ถ„์ด๋‹ค. + +```py +# training seq2seq +def train(pairs, source_vocab, target_vocab, encoder, decoder, n_iter, print_every=1000, learning_rate=0.01): + + ... + + for i in range(1, n_iter + 1): + source_tensor = training_source[i - 1] + target_tensor = training_target[i - 1] + + encoder_hidden = torch.zeros([1, 1, encoder.hidden_size]).to(device) + + encoder_optimizer.zero_grad() + decoder_optimizer.zero_grad() + + source_length = source_tensor.size(0) + target_length = target_tensor.size(0) + + loss = 0 + + for enc_input in range(source_length): + _, encoder_hidden = encoder(source_tensor[enc_input], encoder_hidden) + + ... 
+``` + +๋ฌธ์žฅ์„ ํ•˜๋‚˜์”ฉ ๊ฐ€์ ธ์™€์„œ ๋ฌธ์žฅ์˜ ๋‹จ์–ด๋ฅผ ํ•˜๋‚˜์”ฉ ์ˆœ์ฐจ์ ์œผ๋กœ ๋„ฃ์–ด๊ฐ€๋ฉด์„œ encoder์˜ ์ถœ๋ ฅ์„ ๋งŒ๋“ ๋‹ค. ์ด๋•Œ ๊ฐ ํ•™์Šต์˜ ๋งจ ์ฒ˜์Œ `encoder_hidden`์€ 0์œผ๋กœ ์ฑ„์šด Tensor๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +๋‹ค์Œ์€ decoder์˜ ํ•™์Šต์ด๋‹ค. + +```py +# training seq2seq +def train(pairs, source_vocab, target_vocab, encoder, decoder, n_iter, print_every=1000, learning_rate=0.01): + + ... + + for i in range(1, n_iter + 1): + ... + + decoder_input = torch.Tensor([[SOS_token]]).long().to(device) + decoder_hidden = encoder_hidden # connect encoder output to decoder input + + for di in range(target_length): + decoder_output, decoder_hidden = decoder(decoder_input, decoder_hidden) + loss += criterion(decoder_output, target_tensor[di]) + decoder_input = target_tensor[di] # teacher forcing + + loss.backward() + + encoder_optimizer.step() + decoder_optimizer.step() + + loss_iter = loss.item() / target_length + loss_total += loss_iter + + if i % print_every == 0: + loss_avg = loss_total / print_every + loss_total = 0 + print("[{} - {}%] loss = {:05.4f}".format(i, i / n_iter * 100, loss_avg)) +``` + +decoder์˜ ์ฒซ input์€ SOS๋กœ, hidden state๋Š” encoder์˜ ์ตœ์ข… hidden state๋กœ ๋„ฃ์–ด์ค€๋‹ค. +for๋ฌธ์„ ๋™๋ฉด์„œ decoder๋ฅผ ํ†ต๊ณผ์‹œํ‚ค๋Š” ๋ถ€๋ถ„์„ ๋ณด๋ฉด decoder์—์„œ ๋‚˜์˜จ output์„ ๋‹ค์Œ input์œผ๋กœ ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ์ด ์•„๋‹ˆ๋ผ ์‹ค์ œ label์„ ๋‹ค์Œ cell์— ๋„ฃ์–ด์ฃผ๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. +์ด๊ฑด **Teacher Forcing**์ด๋ผ๋Š” ๋ฐฉ๋ฒ•์œผ๋กœ ์ด์ „ cell์˜ output์„ ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ๋ณด๋‹ค ํ•™์Šต์ด ๋น ๋ฅด์ง€๋งŒ ๋ถˆ์•ˆ์ •ํ•˜๋‹ค๋Š” ํŠน์ง•์„ ๊ฐ€์ง€๊ณ  ์žˆ๋‹ค. + +์ด์ „๊นŒ์ง€ ์ง„ํ–‰ํ–ˆ๋˜ ๊ณผ์ •๋“ค์„ ๋ฐ”ํƒ•์œผ๋กœ `step`์„ ํ†ตํ•ด ํ•™์Šต์„ ์ง„ํ–‰ํ•˜๊ณ  `n_iter`๋งŒํผ ๋ฐ˜๋ณตํ•œ๋‹ค. + +```py +train(load_pairs, load_source_vocab, load_target_vocab, enc, dec, 5000, print_every=1000) + +'''output +[1000 - 20.0%] loss = 0.0285 +[2000 - 40.0%] loss = 0.0168 +[3000 - 60.0%] loss = 0.0119 +[4000 - 80.0%] loss = 0.0091 +[5000 - 100.0%] loss = 0.0074 +''' +``` + +์‹ค์ œ๋กœ ํ•™์Šตํ•˜๋ฉด์„œ ํ™•์ธํ•œ ๊ฒฐ๊ณผ loss๊ฐ€ ์ž˜ ๊ฐ์†Œํ•˜์˜€๋‹ค. + +### Evaluate + +๋งˆ์ง€๋ง‰์œผ๋กœ ํ‰๊ฐ€๋ฅผ ์œ„ํ•œ ํ•จ์ˆ˜๋กœ, ์‹ค์ œ pair๋ฅผ ์ถœ๋ ฅํ•˜๊ณ  ๊ทธ์— ๋Œ€ํ•œ ์˜ˆ์ธก๋„ ํ•จ๊ป˜ ์ถœ๋ ฅํ•˜์—ฌ ์ž˜ ํ•™์Šต๋˜์—ˆ๋Š”์ง€ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋Š” ํ•จ์ˆ˜์ด๋‹ค. + +```py +def evaluate(pairs, source_vocab, target_vocab, encoder, decoder, target_max_length): + for pair in pairs: + print(">", pair[0]) + print("=", pair[1]) + source_tensor = tensorize(source_vocab, pair[0]) + source_length = source_tensor.size()[0] + encoder_hidden = torch.zeros([1, 1, encoder.hidden_size]).to(device) + + for ei in range(source_length): + _, encoder_hidden = encoder(source_tensor[ei], encoder_hidden) + + decoder_input = torch.Tensor([[SOS_token]]).long().to(device) # ์ˆ˜์ •ํ•ด์•ผ ์ž‘๋™ + decoder_hidden = encoder_hidden + decoded_words = [] + + for di in range(target_max_length): + decoder_output, decoder_hidden = decoder(decoder_input, decoder_hidden) + _, top_index = decoder_output.data.topk(1) # 1๊ฐœ์˜ ๊ฐ€์žฅ ํฐ ์š”์†Œ๋ฅผ ๋ฐ˜ํ™˜ + if top_index.item() == EOS_token: + decoded_words.append("") + break + else: + decoded_words.append(target_vocab.index2vocab[top_index.item()]) + + decoder_input = top_index.squeeze().detach() + + predict_words = decoded_words + predict_sentence = " ".join(predict_words) + print("<", predict_sentence) + print("") +``` + +data๋ฅผ tensor๋กœ ๋งŒ๋“ค์–ด์ฃผ๊ณ  ํ•™์Šตํ–ˆ๋˜ encoder์™€ decoder๋ฅผ ํ†ต๊ณผ์‹œ์ผœ model์ด ์—์ธกํ•œ ๋ฌธ์žฅ์„ ์ถœ๋ ฅํ•œ๋‹ค. 
+์ด๋•Œ decoder์˜ ์‹ค์ œ ์ถœ๋ ฅ์€ softmax๋ฅผ ํ†ตํ•ด ๋‚˜์˜จ ํ™•๋ฅ ๋“ค์ด๊ธฐ ๋•Œ๋ฌธ์— `topk(1)`๋กœ ๊ฐ€์žฅ ํฐ ๊ฐ’์˜ index๋ฅผ ๋ฐ›์•„ `Vocab`์˜ ๋‹จ์–ด๋กœ ๋ฐ”๊ฟ”์ค€๋‹ค. + +```py +evaluate(load_pairs, load_source_vocab, load_target_vocab, enc, dec, TARGET_MAX_LENGTH) + +'''output +> i feel hungry. += ๋‚˜๋Š” ๋ฐฐ๊ฐ€ ๊ณ ํ”„๋‹ค. +< ๋‚˜๋Š” ๋ฐฐ๊ฐ€ ๊ณ ํ”„๋‹ค. + +> pytorch is very easy. += ํŒŒ์ดํ† ์น˜๋Š” ๋งค์šฐ ์‰ฝ๋‹ค. +< ํŒŒ์ดํ† ์น˜๋Š” ๋งค์šฐ ์‰ฝ๋‹ค. + +> pytorch is a framework for deep learning. += ํŒŒ์ดํ† ์น˜๋Š” ๋”ฅ๋Ÿฌ๋‹์„ ์œ„ํ•œ ํ”„๋ ˆ์ž„์›Œํฌ์ด๋‹ค. +< ํŒŒ์ดํ† ์น˜๋Š” ๋”ฅ๋Ÿฌ๋‹์„ ์œ„ํ•œ ํ”„๋ ˆ์ž„์›Œํฌ์ด๋‹ค. + +> pytorch is very clear to use. += ํŒŒ์ดํ† ์น˜๋Š” ์‚ฌ์šฉํ•˜๊ธฐ ๋งค์šฐ ์ง๊ด€์ ์ด๋‹ค. +< ํŒŒ์ดํ† ์น˜๋Š” ์‚ฌ์šฉํ•˜๊ธฐ ๋งค์šฐ ์ง๊ด€์ ์ด๋‹ค. +''' +``` + +์ถœ๋ ฅ ๊ฒฐ๊ณผ ๊ธฐ์กด์˜ ๋ฌธ์žฅ๋“ค์— ๋Œ€ํ•ด ํ•™์Šต์„ ์ž˜ ํ•œ ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-06-13-boj_1009.markdown b/_posts/2022-06-13-boj_1009.markdown new file mode 100644 index 00000000000..b5a55fe84a9 --- /dev/null +++ b/_posts/2022-06-13-boj_1009.markdown @@ -0,0 +1,107 @@ +--- +title: "[BOJ] ๋ถ„์‚ฐ ์ฒ˜๋ฆฌ - 1009 (B2)" +author: kwon +date: 2022-06-13T23:00:00 +0900 +categories: [boj, bronze] +tags: [math, implementation] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 1 ์ดˆ | 128 MB | + +# ๋ฌธ์ œ + +์žฌ์šฉ์ด๋Š” ์ตœ์‹  ์ปดํ“จํ„ฐ 10๋Œ€๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ๋‹ค. ์–ด๋А ๋‚  ์žฌ์šฉ์ด๋Š” ๋งŽ์€ ๋ฐ์ดํ„ฐ๋ฅผ ์ฒ˜๋ฆฌํ•ด์•ผ ๋  ์ผ์ด ์ƒ๊ฒจ์„œ ๊ฐ ์ปดํ“จํ„ฐ์— 1๋ฒˆ๋ถ€ํ„ฐ 10๋ฒˆ๊นŒ์ง€์˜ ๋ฒˆํ˜ธ๋ฅผ ๋ถ€์—ฌํ•˜๊ณ , 10๋Œ€์˜ ์ปดํ“จํ„ฐ๊ฐ€ ๋‹ค์Œ๊ณผ ๊ฐ™์€ย ๋ฐฉ๋ฒ•์œผ๋กœ ๋ฐ์ดํ„ฐ๋“ค์„ ์ฒ˜๋ฆฌํ•˜๊ธฐ๋กœ ํ•˜์˜€๋‹ค. + +1๋ฒˆ ๋ฐ์ดํ„ฐ๋Š” 1๋ฒˆ ์ปดํ“จํ„ฐ, 2๋ฒˆ ๋ฐ์ดํ„ฐ๋Š” 2๋ฒˆ ์ปดํ“จํ„ฐ, 3๋ฒˆ ๋ฐ์ดํ„ฐ๋Š” 3๋ฒˆ ์ปดํ“จํ„ฐ, ... , + +10๋ฒˆ ๋ฐ์ดํ„ฐ๋Š” 10๋ฒˆ ์ปดํ“จํ„ฐ, 11๋ฒˆ ๋ฐ์ดํ„ฐ๋Š” 1๋ฒˆ ์ปดํ“จํ„ฐ, 12๋ฒˆ ๋ฐ์ดํ„ฐ๋Š” 2๋ฒˆ ์ปดํ“จํ„ฐ, ... + +์ด ๋ฐ์ดํ„ฐ์˜ ๊ฐœ์ˆ˜๋Š” ํ•ญ์ƒ $a^b$๊ฐœ์˜ ํ˜•ํƒœ๋กœ ์ฃผ์–ด์ง„๋‹ค. ์žฌ์šฉ์ด๋Š” ๋ฌธ๋“ ๋งˆ์ง€๋ง‰ ๋ฐ์ดํ„ฐ๊ฐ€ ์ฒ˜๋ฆฌ๋ ย ์ปดํ“จํ„ฐ์˜ ๋ฒˆํ˜ธ๊ฐ€ ๊ถ๊ธˆํ•ด์กŒ๋‹ค. ์ด๋ฅผ ์ˆ˜ํ–‰ํ•ด์ฃผ๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜๋ผ. + +# ์ž…๋ ฅ + +์ž…๋ ฅ์˜ ์ฒซ ์ค„์—๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๊ทธ ๋‹ค์Œ ์ค„๋ถ€ํ„ฐ ๊ฐ๊ฐ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด ์ •์ˆ˜ a์™€ b๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. (1 โ‰ค a < 100, 1 โ‰ค b < 1,000,000) + +# ์ถœ๋ ฅ + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด ๋งˆ์ง€๋ง‰ ๋ฐ์ดํ„ฐ๊ฐ€ ์ฒ˜๋ฆฌ๋˜๋Š” ์ปดํ“จํ„ฐ์˜ ๋ฒˆํ˜ธ๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +์ฃผ์–ด์ง„ ์ˆซ์ž๋Š” ์ œ๊ณฑ์˜ ํ˜•ํƒœ๋กœ ์ฃผ์–ด์ง€๋ฉฐ ์ตœ๋Œ€ $1000000^{100}$์ด ๋  ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ์ปดํ“จํ„ฐ๋กœ ์ฒ˜๋ฆฌํ•˜๋Š” ๊ฒƒ์„ ๋ชจ๋‘ ์‹œ๋ฎฌ๋ ˆ์ด์…˜ํ•˜๋Š” ๊ฒƒ์œผ๋กœ๋Š” ์‹œ๊ฐ„ ์ œํ•œ์„ ๋งž์ถœ ์ˆ˜ ์—†๋‹ค. ๋ชจ๋‘ ํ™•์ธํ•˜์ง€ ์•Š๊ณ  ์–ด๋–ค ์ปดํ“จํ„ฐ๊ฐ€ ์ฒ˜๋ฆฌํ–ˆ๋Š”์ง€ ์•Œ ์ˆ˜ ์žˆ๋Š” ๋ฐฉ๋ฒ•์€ ์ฃผ์–ด์ง„ ์ˆซ์ž์˜ 1์˜ ์ž๋ฆฌ๋ฅผ ํ™œ์šฉํ•˜๋Š” ๊ฒƒ์ด๋‹ค. ์–ด๋–ค 1์˜ ์ž๋ฆฌ์˜ ์ˆ˜๋ฅผ n๋ฒˆ ๊ณฑํ•˜๋ฉด ๊ณ„์‚ฐ๋œ ์ˆ˜์˜ 1์˜ ์ž๋ฆฌ๊ฐ€ ์ฃผ๊ธฐ์ ์œผ๋กœ ๋ฐ˜๋ณต๋˜๋Š” ๊ฒƒ์„ ์•Œ ์ˆ˜ ์žˆ๊ณ  ๊ทธ ์ˆ˜๋Š” ์•„๋ž˜์™€ ๊ฐ™์ด ๋‚˜ํƒ€๋‚ผ ์ˆ˜ ์žˆ๋‹ค. + +```python +nums = {1 : [1], + 2 : [2, 4, 8, 6], + 3 : [3, 9, 7, 1], + 4 : [4, 6], + 5 : [5], + 6 : [6], + 7 : [7, 9, 3, 1], + 8 : [8, 4, 2, 6], + 9 : [9, 1]} +``` + +ํ•˜์ง€๋งŒ ๋ฌดํ„ฑ๋Œ€๊ณ  $a^b$๋ฅผ ๊ณ„์‚ฐํ•ด์„œ 1์˜ ์ž๋ฆฌ๋ฅผ ๊ตฌํ•œ๋‹ค๊ณ  ํ•ด๋„ ์ค‘๋ณต๋˜๋Š” ๊ฐ’๋“ค์ด ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ์ •ํ™•ํžˆ ์–ด๋–ค ์ˆ˜์˜ ์ œ๊ณฑ์ธ์ง€ ์•Œ ์ˆ˜ ์—†๋‹ค. ๊ทธ๋ž˜์„œ ๋จผ์ € a๋ฅผ nums์˜ ํ‚ค๋กœ ๋„ฃ์–ด ์ œ๊ณฑ ํ–ˆ์„ ๋•Œ 1์˜ ์ž๋ฆฌ์— ๋‚˜์˜ฌ ๊ฐ’๋“ค์„ ์ถ”๋ ค๋‚ด๊ณ  ๊ทธ ์ค‘ b๋ฒˆ์งธ ๊ฐ’์„ ๋‹ต์œผ๋กœ ์ถœ๋ ฅํ•œ๋‹ค. 
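+
+์ฐธ๊ณ ๋กœ ์œ„์˜ `train`/`evaluate` ํ˜ธ์ถœ์—์„œ ์‚ฌ์šฉํ•œ `enc`, `dec`๋ฅผ ๋งŒ๋“œ๋Š” ๋ถ€๋ถ„์€ ๋ณธ๋ฌธ์— ์ƒ๋žต๋˜์–ด ์žˆ๋‹ค. ์•ž์—์„œ ์ •์˜ํ•œ `Encoder`/`Decoder`๋ฅผ ๊ทธ๋Œ€๋กœ ๋”ฐ๋ฅธ๋‹ค๋ฉด ๋Œ€๋žต ์•„๋ž˜์ฒ˜๋Ÿผ ์ƒ์„ฑํ–ˆ๋‹ค๊ณ  ๊ฐ€์ •ํ•  ์ˆ˜ ์žˆ๋‹ค. (`enc_hidden_size` ๊ฐ’์€ ์ž„์˜๋กœ ์žก์€ ๊ฐ€์ •์ด๋‹ค.)
+
+```py
+# enc / dec ์ƒ์„ฑ ๋ถ€๋ถ„์˜ ์Šค์ผ€์น˜ (hidden size๋Š” ์ž„์˜์˜ ๊ฐ€์ •๊ฐ’)
+enc_hidden_size = 16
+dec_hidden_size = enc_hidden_size  # decoder_hidden = encoder_hidden ์ด๋ฏ€๋กœ ๋‘ hidden size๋Š” ๊ฐ™์•„์•ผ ํ•œ๋‹ค
+
+enc = Encoder(load_source_vocab.n_vocab, enc_hidden_size).to(device)
+dec = Decoder(dec_hidden_size, load_target_vocab.n_vocab).to(device)
+```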
b๋ฒˆ์งธ ๊ฐ’์ด ๋‹ต์ด ๋  ์ˆ˜ ์žˆ๋Š” ์ด์œ ๋Š” a๋ฅผ b๋ฒˆ ๊ณฑํ•˜๋Š” ๋™์•ˆ์— 1์˜ ์ž๋ฆฌ์˜ ๊ฐ’๋„ b๋ฒˆ ๋ฐ”๋€” ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด a๊ฐ€ 124, b๊ฐ€ 16์ด๋ผ๊ณ  ํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ณผ์ •์„ ๊ฑฐ์น˜๊ฒŒ ๋œ๋‹ค. + +```python +n = int(input()) + +nums = {1 : [1], + 2 : [2, 4, 8, 6], + 3 : [3, 9, 7, 1], + 4 : [4, 6], + 5 : [5], + 6 : [6], + 7 : [7, 9, 3, 1], + 8 : [8, 4, 2, 6], + 9 : [9, 1]} + +for _ in range(n): + a, b = map(int, input().split()) + # a์˜ 1์˜ ์ž๋ฆฌ์ˆ˜๋ฅผ ๊ตฌํ•œ๋‹ค -> 4 + one = a % 10 + + # 1์˜ ์ž๋ฆฌ์— ๋‚˜์˜ฌ ์ˆ˜ ์žˆ๋Š” ๊ฐ’๋“ค์„ ๊ฐ€์ ธ์˜จ๋‹ค -> nums[4] -> [4, 6] + # ์ด ์ค‘์—์„œ b๋ฒˆ์งธ ๊ฐ’์„ ์ทจํ•œ๋‹ค. 4, 6, 4, 6, 4, 6 -> 6 + print(nums[one][(b - 1) % len(nums[one])]) +``` + +๋‹จ a์˜ 1์˜ ์ž๋ฆฌ๊ฐ€ 0์ผ ๊ฒฝ์šฐ ๋ฌด์กฐ๊ฑด ์ œ๊ณฑํ•œ ์ˆ˜์˜ 1์˜ ์ž๋ฆฌ๋„ 0์ด๋ฏ€๋กœ 10๋ฒˆ ์ปดํ“จํ„ฐ๊ฐ€ ์ฒ˜๋ฆฌํ•˜๊ฒŒ ๋œ๋‹ค. + +```python +a, b = map(int, input().split()) +one = a % 10 +if not one: + print(10) +``` + +## ์ฝ”๋“œ + +```python +n = int(input()) + +nums = {1 : [1], + 2 : [2, 4, 8, 6], + 3 : [3, 9, 7, 1], + 4 : [4, 6], + 5 : [5], + 6 : [6], + 7 : [7, 9, 3, 1], + 8 : [8, 4, 2, 6], + 9 : [9, 1]} + +for _ in range(n): + a, b = map(int, input().split()) + one = a % 10 + if not one: + print(10) + else: + print(nums[one][(b - 1) % len(nums[one])]) +``` \ No newline at end of file diff --git a/_posts/2022-09-06-boj_1406.markdown b/_posts/2022-09-06-boj_1406.markdown new file mode 100644 index 00000000000..2438b7f8ca9 --- /dev/null +++ b/_posts/2022-09-06-boj_1406.markdown @@ -0,0 +1,86 @@ +--- +title: "[BOJ] ์—๋””ํ„ฐ - 1406 (S2) +)" +author: kwon +date: 2022-9-6T14:00:00 +0900 +categories: [boj, silver] +tags: [data structure, stack, linked list] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 0.3 | 512 | + +# ๋ฌธ์ œ + +ํ•œ ์ค„๋กœ ๋œ ๊ฐ„๋‹จํ•œ ์—๋””ํ„ฐ๋ฅผ ๊ตฌํ˜„ํ•˜๋ ค๊ณ  ํ•œ๋‹ค. ์ด ํŽธ์ง‘๊ธฐ๋Š” ์˜์–ด ์†Œ๋ฌธ์ž๋งŒ์„ ๊ธฐ๋กํ•  ์ˆ˜ ์žˆ๋Š” ํŽธ์ง‘๊ธฐ๋กœ, ์ตœ๋Œ€ 600,000๊ธ€์ž๊นŒ์ง€ ์ž…๋ ฅํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ด ํŽธ์ง‘๊ธฐ์—๋Š” '์ปค์„œ'๋ผ๋Š” ๊ฒƒ์ด ์žˆ๋Š”๋ฐ, ์ปค์„œ๋Š” ๋ฌธ์žฅ์˜ ๋งจ ์•ž(์ฒซ ๋ฒˆ์งธ ๋ฌธ์ž์˜ ์™ผ์ชฝ), ๋ฌธ์žฅ์˜ ๋งจ ๋’ค(๋งˆ์ง€๋ง‰ ๋ฌธ์ž์˜ ์˜ค๋ฅธ์ชฝ), ๋˜๋Š” ๋ฌธ์žฅ ์ค‘๊ฐ„ ์ž„์˜์˜ ๊ณณ(๋ชจ๋“  ์—ฐ์†๋œ ๋‘ ๋ฌธ์ž ์‚ฌ์ด)์— ์œ„์น˜ํ•  ์ˆ˜ ์žˆ๋‹ค. ์ฆ‰ ๊ธธ์ด๊ฐ€ L์ธ ๋ฌธ์ž์—ด์ด ํ˜„์žฌ ํŽธ์ง‘๊ธฐ์— ์ž…๋ ฅ๋˜์–ด ์žˆ์œผ๋ฉด, ์ปค์„œ๊ฐ€ ์œ„์น˜ํ•  ์ˆ˜ ์žˆ๋Š” ๊ณณ์€ L+1๊ฐ€์ง€ ๊ฒฝ์šฐ๊ฐ€ ์žˆ๋‹ค. + +์ด ํŽธ์ง‘๊ธฐ๊ฐ€ ์ง€์›ํ•˜๋Š” ๋ช…๋ น์–ด๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +| L | ์ปค์„œ๋ฅผ ์™ผ์ชฝ์œผ๋กœ ํ•œ ์นธ ์˜ฎ๊น€ (์ปค์„œ๊ฐ€ ๋ฌธ์žฅ์˜ ๋งจ ์•ž์ด๋ฉด ๋ฌด์‹œ๋จ) | +| --- | --- | +| D | ์ปค์„œ๋ฅผ ์˜ค๋ฅธ์ชฝ์œผ๋กœ ํ•œ ์นธ ์˜ฎ๊น€ (์ปค์„œ๊ฐ€ ๋ฌธ์žฅ์˜ ๋งจ ๋’ค์ด๋ฉด ๋ฌด์‹œ๋จ) | +| B | ์ปค์„œ ์™ผ์ชฝ์— ์žˆ๋Š” ๋ฌธ์ž๋ฅผ ์‚ญ์ œํ•จ (์ปค์„œ๊ฐ€ ๋ฌธ์žฅ์˜ ๋งจ ์•ž์ด๋ฉด ๋ฌด์‹œ๋จ)์‚ญ์ œ๋กœ ์ธํ•ด ์ปค์„œ๋Š” ํ•œ ์นธ ์™ผ์ชฝ์œผ๋กœ ์ด๋™ํ•œ ๊ฒƒ์ฒ˜๋Ÿผ ๋‚˜ํƒ€๋‚˜์ง€๋งŒ, ์‹ค์ œ๋กœ ์ปค์„œ์˜ ์˜ค๋ฅธ์ชฝ์— ์žˆ๋˜ ๋ฌธ์ž๋Š” ๊ทธ๋Œ€๋กœ์ž„ | +| Pย $ | $๋ผ๋Š” ๋ฌธ์ž๋ฅผ ์ปค์„œ ์™ผ์ชฝ์— ์ถ”๊ฐ€ํ•จ | + +์ดˆ๊ธฐ์— ํŽธ์ง‘๊ธฐ์— ์ž…๋ ฅ๋˜์–ด ์žˆ๋Š” ๋ฌธ์ž์—ด์ด ์ฃผ์–ด์ง€๊ณ , ๊ทธ ์ดํ›„ ์ž…๋ ฅํ•œ ๋ช…๋ น์–ด๊ฐ€ ์ฐจ๋ก€๋กœ ์ฃผ์–ด์กŒ์„ ๋•Œ, ๋ชจ๋“  ๋ช…๋ น์–ด๋ฅผ ์ˆ˜ํ–‰ํ•˜๊ณ  ๋‚œ ํ›„ ํŽธ์ง‘๊ธฐ์— ์ž…๋ ฅ๋˜์–ด ์žˆ๋Š” ๋ฌธ์ž์—ด์„ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. ๋‹จ, ๋ช…๋ น์–ด๊ฐ€ ์ˆ˜ํ–‰๋˜๊ธฐ ์ „์— ์ปค์„œ๋Š” ๋ฌธ์žฅ์˜ ๋งจ ๋’ค์— ์œ„์น˜ํ•˜๊ณ  ์žˆ๋‹ค๊ณ  ํ•œ๋‹ค. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์—๋Š” ์ดˆ๊ธฐ์— ํŽธ์ง‘๊ธฐ์— ์ž…๋ ฅ๋˜์–ด ์žˆ๋Š” ๋ฌธ์ž์—ด์ด ์ฃผ์–ด์ง„๋‹ค. 
์ด ๋ฌธ์ž์—ด์€ ๊ธธ์ด๊ฐ€ N์ด๊ณ , ์˜์–ด ์†Œ๋ฌธ์ž๋กœ๋งŒ ์ด๋ฃจ์–ด์ ธ ์žˆ์œผ๋ฉฐ, ๊ธธ์ด๋Š” 100,000์„ ๋„˜์ง€ ์•Š๋Š”๋‹ค. ๋‘˜์งธ ์ค„์—๋Š” ์ž…๋ ฅํ•  ๋ช…๋ น์–ด์˜ ๊ฐœ์ˆ˜๋ฅผ ๋‚˜ํƒ€๋‚ด๋Š” ์ •์ˆ˜ M(1 โ‰ค Mย โ‰ค 500,000)์ด ์ฃผ์–ด์ง„๋‹ค. ์…‹์งธ ์ค„๋ถ€ํ„ฐ M๊ฐœ์˜ ์ค„์— ๊ฑธ์ณ ์ž…๋ ฅํ•  ๋ช…๋ น์–ด๊ฐ€ ์ˆœ์„œ๋Œ€๋กœ ์ฃผ์–ด์ง„๋‹ค. ๋ช…๋ น์–ด๋Š” ์œ„์˜ ๋„ค ๊ฐ€์ง€ ์ค‘ ํ•˜๋‚˜์˜ ํ˜•ํƒœ๋กœ๋งŒ ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— ๋ชจ๋“  ๋ช…๋ น์–ด๋ฅผ ์ˆ˜ํ–‰ํ•˜๊ณ  ๋‚œ ํ›„ ํŽธ์ง‘๊ธฐ์— ์ž…๋ ฅ๋˜์–ด ์žˆ๋Š” ๋ฌธ์ž์—ด์„ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +์ปค์„œ๋ฅผ ์›€์ง์ด๊ฑฐ๋‚˜ ๊ธฐ์ค€์œผ๋กœ ๋ฌธ์ž๋ฅผ ์‚ญ์ œ/์ถ”๊ฐ€ํ•˜๋Š” ๋ช…๋ น์„ ์ˆ˜ํ–‰ํ•ด์•ผ ํ•œ๋‹ค. ์ด๋ฅผ ์œ„ํ•ด ์ปค์„œ๋ฅผ ์ธ๋ฑ์Šค ๊ฐ’์œผ๋กœ ๋‘๊ณ  ๊ตฌํ˜„ํ•  ์ˆ˜๋„ ์žˆ๊ฒ ์ง€๋งŒ stack์˜ ๊ด€์ ์œผ๋กœ ๋ณธ๋‹ค๋ฉด ์กฐ๊ธˆ ๋” ํŽธํ•˜๊ฒŒ ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•  ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/boj_1406-1.png) + +์œ„ ๊ทธ๋ฆผ๊ณผ ๊ฐ™์ด ์ปค์„œ๋ฅผ ์ค‘์‹ฌ์œผ๋กœ ์–‘ ์ชฝ์˜ ๋ฌธ์ž์—ด๋“ค์„ ์Šคํƒ์œผ๋กœ ์ƒ๊ฐํ•˜๋ฉด ๋ฌธ์ œ์—์„œ ์ฃผ์–ด์ง„ 4 ๊ฐœ์˜ ๋ช…๋ น์–ด๋ฅผ ์ˆ˜ํ–‰ํ•˜๊ธฐ ํŽธํ•ด์ง„๋‹ค. ๋‹จ, ์—ฌ๊ธฐ์„œ ์Šคํƒ์˜ ๊ฐ€์žฅ ์œ„๋ฅผ ์ปค์„œ๊ฐ€ ์žˆ๋Š” ๋ฐฉํ–ฅ์œผ๋กœ ์ƒ๊ฐํ•œ๋‹ค. ์ฆ‰, ์™ผ์ชฝ ์Šคํƒ์€ ์˜ค๋ฅธ์ชฝ์ด ์Šคํƒ์˜ ์œ„์ชฝ์ด๋ฉฐ ์˜ค๋ฅธ์ชฝ ์Šคํƒ์€ ์™ผ์ชฝ์ด ์Šคํƒ์˜ ์œ„์ชฝ์ด ๋œ๋‹ค. + +๋จผ์ €, ์ปค์„œ๋ฅผ ์˜ฎ๊ธฐ๋Š” ๊ฒฝ์šฐ์—๋Š” ํ•œ ์ชฝ์˜ ์Šคํƒ์˜ ๊ฐ€์žฅ ์œ„์— ์žˆ๋Š” ๋ฌธ์ž์—ด์„ ๋ฐ˜๋Œ€์ชฝ ์Šคํƒ์œผ๋กœ ๋„ฃ์–ด์ฃผ๋ฉด ๋œ๋‹ค. + +![](/posting_imgs/boj_1406-2.png) + +์˜ˆ๋ฅผ ๋“ค์–ด **D** ๋ช…๋ น์„ ํ†ตํ•ด ์ปค์„œ๋ฅผ ์˜ค๋ฅธ์ชฝ์œผ๋กœ ์˜ฎ๊ธธ ๋•Œ ์œ„ ๊ทธ๋ฆผ์ฒ˜๋Ÿผ ์˜ค๋ฅธ์ชฝ ์Šคํƒ์˜ ๊ฐ€์žฅ ์œ„์— ์žˆ๋Š” f๋ฅผ ๋นผ์„œ ์™ผ์ชฝ ์Šคํƒ์œผ๋กœ ๋„ฃ์–ด์ฃผ๋ฉด ์ปค์„œ๋ฅผ ์˜ค๋ฅธ์ชฝ์œผ๋กœ ์›€์ง์ด๋Š” ๊ฒƒ๊ณผ ๊ฐ™์€ ํšจ๊ณผ๋ฅผ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +์‚ญ์ œ ๋ช…๋ น์€ ์ปค์„œ๊ฐ€ ๋งจ ์•ž์— ์žˆ๋Š” ๊ฒฝ์šฐ๋ฅผ ์ œ์™ธํ•˜๋ฉด ํ•ญ์ƒ ์™ผ์ชฝ ์Šคํƒ์˜ ๋ฌธ์ž๋ฅผ ํ•˜๋‚˜์”ฉ ์‚ญ์ œํ•˜๋ฉด ๋˜๋ฉฐ, ์‚ฝ์ž… ๋ช…๋ น์€ ํ•ญ์ƒ ์™ผ์ชฝ ์Šคํƒ์— ์‚ฝ์ž…ํ•˜๋ ค๋Š” ๋ฌธ์ž๋ฅผ ๋„ฃ์–ด์ฃผ๋ฉด ๋œ๋‹ค. + +## ์ฝ”๋“œ + +```python +import sys + +input = sys.stdin.readline + +left = list(input().rstrip()) +right = [] + +for _ in range(int(input())): + com = (list(input().split())) + if com[0] == 'L': + if left: + right.append(left.pop()) + + elif com[0] == 'D': + if right: + left.append(right.pop()) + + elif com[0] == 'B': + if left: + left.pop() + + elif com[0] == 'P': + left.append(com[1]) + +print(''.join(left+right[::-1])) +``` + +python์—์„œ๋Š” list์˜ `pop()` ํ•จ์ˆ˜๋กœ ์Šคํƒ์—์„œ ๊บผ๋‚ด๊ณ , `append()` ํ•จ์ˆ˜๋กœ ์Šคํƒ์— ๊ฐ’์„ ๋„ฃ๋Š” ๊ฒƒ์œผ๋กœ stack์„ ๊ฐ„๋‹จํ•˜๊ฒŒ ๊ตฌํ˜„ํ•  ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-09-13-boj_24060.markdown b/_posts/2022-09-13-boj_24060.markdown new file mode 100644 index 00000000000..951c689ad4f --- /dev/null +++ b/_posts/2022-09-13-boj_24060.markdown @@ -0,0 +1,157 @@ +--- +title: "[BOJ] ๋ณ‘ํ•ฉ ์ •๋ ฌ 1 - 24060 (S3)" +author: kwon +date: 2022-09-13T23:00:00 +0900 +categories: [boj, silver] +tags: [implementation, sort, recursion] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 1 ์ดˆ | 512 MB | + +## ๋ฌธ์ œ + +์˜ค๋Š˜๋„ ์„œ์ค€์ด๋Š” ๋ณ‘ํ•ฉ ์ •๋ ฌย ์ˆ˜์—… ์กฐ๊ต๋ฅผ ํ•˜๊ณ  ์žˆ๋‹ค.ย ์•„๋น ๊ฐ€ ์ˆ˜์—…ํ•œย ๋‚ด์šฉ์„ ํ•™์ƒ๋“ค์ด ์ž˜ ์ดํ•ดํ–ˆ๋Š”์ง€ ๋ฌธ์ œ๋ฅผ ํ†ตํ•ด์„œ ํ™•์ธํ•ด๋ณด์ž. + +*N*๊ฐœ์˜ ์„œ๋กœ ๋‹ค๋ฅธ ์–‘์˜ย ์ •์ˆ˜๊ฐ€ ์ €์žฅ๋œย ๋ฐฐ์—ด A๊ฐ€ ์žˆ๋‹ค. ๋ณ‘ํ•ฉ ์ •๋ ฌ๋กœ ๋ฐฐ์—ด A๋ฅผ ์˜ค๋ฆ„์ฐจ์ˆœ ์ •๋ ฌํ•  ๊ฒฝ์šฐ ๋ฐฐ์—ด A์—ย *K*ย ๋ฒˆ์งธ ์ €์žฅ๋˜๋Š”ย ์ˆ˜๋ฅผ ๊ตฌํ•ด์„œ ์šฐ๋ฆฌ ์„œ์ค€์ด๋ฅผ ๋„์™€์ฃผ์ž. + +ํฌ๊ธฐ๊ฐ€ย *N*์ธ ๋ฐฐ์—ด์— ๋Œ€ํ•œย ๋ณ‘ํ•ฉ ์ •๋ ฌย ์˜์‚ฌ ์ฝ”๋“œ๋Š”ย ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. 
+ +``` +merge_sort(A[p..r]) { # A[p..r]์„ ์˜ค๋ฆ„์ฐจ์ˆœ ์ •๋ ฌํ•œ๋‹ค. + if (p < r) then { + q <- โŒŠ(p + r) / 2โŒ‹; # q๋Š” p, r์˜ ์ค‘๊ฐ„ ์ง€์  +ย  merge_sort(A, p, q); # ์ „๋ฐ˜๋ถ€ ์ •๋ ฌ +ย  merge_sort(A, q + 1, r); # ํ›„๋ฐ˜๋ถ€ ์ •๋ ฌ +ย  merge(A, p, q, r); # ๋ณ‘ํ•ฉ +ย  } +} + +# A[p..q]์™€ A[q+1..r]์„ ๋ณ‘ํ•ฉํ•˜์—ฌ A[p..r]์„ ์˜ค๋ฆ„์ฐจ์ˆœ ์ •๋ ฌ๋œ ์ƒํƒœ๋กœ ๋งŒ๋“ ๋‹ค. +# A[p..q]์™€ A[q+1..r]์€ ์ด๋ฏธ ์˜ค๋ฆ„์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌ๋˜์–ด ์žˆ๋‹ค. +merge(A[], p, q, r) { + i <- p; j <- q + 1; t <- 1; + while (i โ‰ค q and j โ‰ค r) { + if (A[i] โ‰ค A[j]) +ย  then tmp[t++] <- A[i++]; # tmp[t] <- A[i]; t++; i++; +ย  else tmp[t++] <- A[j++]; # tmp[t] <- A[j]; t++; j++; +ย  } + while (i โ‰ค q) # ์™ผ์ชฝ ๋ฐฐ์—ด ๋ถ€๋ถ„์ด ๋‚จ์€ ๊ฒฝ์šฐ +ย  tmp[t++] <- A[i++]; +ย  while (j โ‰ค r) # ์˜ค๋ฅธ์ชฝ ๋ฐฐ์—ด ๋ถ€๋ถ„์ด ๋‚จ์€ ๊ฒฝ์šฐ +ย  tmp[t++] <- A[j++]; +ย  i <- p; t <- 1; +ย  while (i โ‰ค r) # ๊ฒฐ๊ณผ๋ฅผ A[p..r]์— ์ €์žฅ +ย  A[i++] <- tmp[t++]; +} +``` + +## ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ๋ฐฐ์—ด A์˜ ํฌ๊ธฐย *N*(5ย โ‰คย *N*ย โ‰ค 500,000), ์ €์žฅ ํšŸ์ˆ˜ย *K*(1 โ‰คย *K*ย โ‰ค 108)๊ฐ€ย ์ฃผ์–ด์ง„๋‹ค. + +๋‹ค์Œย ์ค„์— ์„œ๋กœ ๋‹ค๋ฅธ ๋ฐฐ์—ด A์˜ ์›์†Œ A1, A2, ..., AN์ด ์ฃผ์–ด์ง„๋‹ค.ย (1ย โ‰ค Aiย โ‰ค 109) + +## ์ถœ๋ ฅ + +๋ฐฐ์—ด A์—ย *K*ย ๋ฒˆ์งธ ์ €์žฅ ๋˜๋Š”ย ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. ์ €์žฅ ํšŸ์ˆ˜๊ฐ€ย *K*ย ๋ณด๋‹ค ์ž‘์œผ๋ฉด -1์„ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +๋ฌธ์ œ์— ์ž‘์„ฑ๋œ ์˜์‚ฌ ์ฝ”๋“œ๋ฅผ ๋”ฐ๋ผ ๋ณ‘ํ•ฉ ์ •๋ ฌ ์ฝ”๋“œ๋ฅผ ์™„์„ฑํ•˜์—ฌ ์ •๋ ฌ๋œ ๋ฐฐ์—ด A์— K๋ฒˆ์งธ ์ €์žฅ๋œ ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•˜๋Š” ๋ฌธ์ œ์ด๋‹ค. + +## ๋ณ‘ํ•ฉ ์ •๋ ฌ + +๋ณ‘ํ•ฉ ์ •๋ ฌ์€ ์ฃผ์–ด์ง„ ๋ฐฐ์—ด์„ ์ ˆ๋ฐ˜์œผ๋กœ ๋‚˜๋ˆ„๋Š” ๊ณผ์ •๊ณผ ์ด๋ฅผ ๋‹ค์‹œ ํ•ฉ์น˜๋Š” ๋ณ‘ํ•ฉ ๊ณผ์ •์œผ๋กœ ์ง„ํ–‰๋œ๋‹ค. + +๋จผ์ € ๋ถ€๋ถ„ ๋ฐฐ์—ด(๋‚˜๋ˆ„์–ด์ง„ ๋ฐฐ์—ด)์˜ ์›์†Œ๊ฐ€ ํ•˜๋‚˜๋งŒ ๋‚จ๊ฑฐ๋‚˜ ๋ถ€๋ถ„ ๋ฐฐ์—ด์ด ๋นŒ ๋•Œ๊นŒ์ง€ ๋ฐฐ์—ด์„ ์ ˆ๋ฐ˜์œผ๋กœ ๋‚˜๋ˆ„์–ด ์ค€๋‹ค. ์ด๋Š” ์žฌ๊ท€๋ฅผ ์ด์šฉํ•˜์—ฌ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๊ตฌํ˜„ํ•  ์ˆ˜ ์žˆ๋‹ค. + +```python +def merge_sort(a, p, r): + global result + if result != -1: + return + if p < r: + q = (p + r) // 2 + merge_sort(a, p, q) + merge_sort(a, q+1, r) + ... +``` + +๋ชจ๋‘ ๋‹ค ๋‚˜๋ˆด์œผ๋ฉด ์ด์ œ ๋ณ‘ํ•ฉํ•˜๋ฉฐ ์ •๋ ฌ๋œ ๋ฐฐ์—ด์„ ์™„์„ฑํ•œ๋‹ค. + +๋ณ‘ํ•ฉํ•  ๋‘ ๋ฐฐ์—ด์˜ ์›์†Œ๋“ค์„ ํ™•์ธํ•˜๋ฉฐ ์ž‘์€ ์ˆœ์œผ๋กœ ๋ฐฐ์—ด์— ๋„ฃ์–ด์ค€๋‹ค. ํ•˜์ง€๋งŒ ๋‘ ๋ฐฐ์—ด์˜ ๊ธธ์ด๊ฐ€ ํ•ญ์ƒ ๊ฐ™์ง€๋Š” ์•Š์œผ๋ฏ€๋กœ ๋‚จ์€ ๋ฐฐ์—ด์˜ ์›์†Œ๋“ค๋„ ์ฐจ๋ก€๋Œ€๋กœ ๋„ฃ์–ด์ค€๋‹ค. ์ด ๋•Œ ๊ฐ ๋ถ€๋ถ„ ๋ฐฐ์—ด๋“ค์€ ์ด์ „์˜ ๋ณ‘ํ•ฉ์—์„œ ์ด๋ฏธ ์ •๋ ฌ๋œ ์ƒํƒœ์ด๋ฏ€๋กœ ๊ทธ๋ƒฅ ์ˆœ์„œ๋Œ€๋กœ ๋„ฃ์–ด์ฃผ๋ฉด ๋œ๋‹ค. + +์œ„ ๋ฐฉ์‹๋Œ€๋กœ ๋ชจ๋“  ๋ณ‘ํ•ฉ์„ ์™„๋ฃŒํ•˜๋ฉด ์ •๋ ฌ์ด ์™„๋ฃŒ๋œ๋‹ค. + +ํ•˜์ง€๋งŒ ์šฐ๋ฆฌ๊ฐ€ ์›ํ•˜๋Š” ๊ฒƒ์€ ํŠน์ • ๋ฒˆ์งธ์— ์ €์žฅ๋œ ์ˆ˜๋ฅผ ์ฐพ๋Š” ๊ฒƒ์ด๋ฏ€๋กœ ์ •๋ ฌํ•œ ๊ฐ’์„ ์ž„์‹œ๋กœ ์ €์žฅํ•œ ๋”•์…”๋„ˆ๋ฆฌ `tmp`์˜ ๊ฐ’์„ ๋ฐฐ์—ด `a`์— ์˜ฎ๊ฒจ์ฃผ๋ฉด์„œ ์›ํ•˜๋Š” ๋ฒˆ์งธ์— `a`์— ์ €์žฅ๋œ ๊ฐ’์„ ์ฐพ์•„๋‚ธ๋‹ค. + +```python + ... + while i <= r: + a[i] = tmp[t] + cnt += 1 + if cnt == k: + cnt = -float('inf') + result = tmp[t] + return + t += 1 + i += 1 + ... 
+``` + +## ์ฝ”๋“œ + +```python +a, k = map(int, input().split()) +nums = list(map(int, input().split())) +cnt = 0 +result = -1 +def merge_sort(a, p, r): + global result + if result != -1: + return + if p < r: + q = (p + r) // 2 + merge_sort(a, p, q) + merge_sort(a, q+1, r) + merge(a, p, q, r) + +def merge(a, p, q, r): + global cnt, result + i, j, t = p, q + 1, 0 + tmp = {} + while i <= q and j <= r: + if a[i] <= a[j]: + tmp[t] = a[i] + t += 1 + i += 1 + else: + tmp[t] = a[j] + t += 1 + j += 1 + while i <= q: + tmp[t] = a[i] + t += 1 + i += 1 + while j <= r: + tmp[t] = a[j] + t += 1 + j += 1 + i, t = p, 0 + while i <= r: + a[i] = tmp[t] + cnt += 1 + if cnt == k: + cnt = -float('inf') + result = tmp[t] + return + t += 1 + i += 1 + +merge_sort(nums, 0, a-1) +print(result) +``` \ No newline at end of file diff --git a/_posts/2022-09-26-boj_7568.markdown b/_posts/2022-09-26-boj_7568.markdown new file mode 100644 index 00000000000..dd48028a258 --- /dev/null +++ b/_posts/2022-09-26-boj_7568.markdown @@ -0,0 +1,60 @@ +--- +title: "[BOJ] ๋ฉ์น˜ - 7568 (S5)" +author: kwon +date: 2022-09-26T23:00:00 +0900 +categories: [boj, silver] +tags: [implementation, brute-force] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 1์ดˆ | 128 MB | + +# ๋ฌธ์ œ + +์šฐ๋ฆฌ๋Š” ์‚ฌ๋žŒ์˜ ๋ฉ์น˜๋ฅผ ํ‚ค์™€ ๋ชธ๋ฌด๊ฒŒ, ์ด ๋‘ ๊ฐœ์˜ ๊ฐ’์œผ๋กœ ํ‘œํ˜„ํ•˜์—ฌ ๊ทธ ๋“ฑ์ˆ˜๋ฅผ ๋งค๊ฒจ๋ณด๋ ค๊ณ  ํ•œ๋‹ค. ์–ด๋–ค ์‚ฌ๋žŒ์˜ ๋ชธ๋ฌด๊ฒŒ๊ฐ€ x kg์ด๊ณ  ํ‚ค๊ฐ€ y cm๋ผ๋ฉด ์ด ์‚ฌ๋žŒ์˜ ๋ฉ์น˜๋Š” (x, y)๋กœ ํ‘œ์‹œ๋œ๋‹ค. ๋‘ ์‚ฌ๋žŒ A ์™€ B์˜ ๋ฉ์น˜๊ฐ€ ๊ฐ๊ฐ (x, y), (p, q)๋ผ๊ณ  ํ•  ๋•Œ x > p ๊ทธ๋ฆฌ๊ณ  y > q ์ด๋ผ๋ฉด ์šฐ๋ฆฌ๋Š” A์˜ ๋ฉ์น˜๊ฐ€ B์˜ ๋ฉ์น˜๋ณด๋‹ค "๋” ํฌ๋‹ค"๊ณ  ๋งํ•œ๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด ์–ด๋–ค A, B ๋‘ ์‚ฌ๋žŒ์˜ ๋ฉ์น˜๊ฐ€ ๊ฐ๊ฐ (56, 177), (45, 165) ๋ผ๊ณ  ํ•œ๋‹ค๋ฉด A์˜ ๋ฉ์น˜๊ฐ€ B๋ณด๋‹ค ํฐ ์…ˆ์ด ๋œ๋‹ค. ๊ทธ๋Ÿฐ๋ฐ ์„œ๋กœ ๋‹ค๋ฅธ ๋ฉ์น˜๋ผ๋ฆฌ ํฌ๊ธฐ๋ฅผ ์ •ํ•  ์ˆ˜ ์—†๋Š” ๊ฒฝ์šฐ๋„ ์žˆ๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด ๋‘ ์‚ฌ๋žŒ C์™€ D์˜ ๋ฉ์น˜๊ฐ€ ๊ฐ๊ฐ (45, 181), (55, 173)์ด๋ผ๋ฉด ๋ชธ๋ฌด๊ฒŒ๋Š” D๊ฐ€ C๋ณด๋‹ค ๋” ๋ฌด๊ฒ๊ณ , ํ‚ค๋Š” C๊ฐ€ ๋” ํฌ๋ฏ€๋กœ, "๋ฉ์น˜"๋กœ๋งŒ ๋ณผ ๋•Œ C์™€ D๋Š” ๋ˆ„๊ตฌ๋„ ์ƒ๋Œ€๋ฐฉ๋ณด๋‹ค ๋” ํฌ๋‹ค๊ณ  ๋งํ•  ์ˆ˜ ์—†๋‹ค. + +N๋ช…์˜ ์ง‘๋‹จ์—์„œ ๊ฐ ์‚ฌ๋žŒ์˜ ๋ฉ์น˜ ๋“ฑ์ˆ˜๋Š” ์ž์‹ ๋ณด๋‹ค ๋” "ํฐ ๋ฉ์น˜"์˜ ์‚ฌ๋žŒ์˜ ์ˆ˜๋กœ ์ •ํ•ด์ง„๋‹ค. ๋งŒ์ผ ์ž์‹ ๋ณด๋‹ค ๋” ํฐ ๋ฉ์น˜์˜ ์‚ฌ๋žŒ์ด k๋ช…์ด๋ผ๋ฉด ๊ทธ ์‚ฌ๋žŒ์˜ ๋ฉ์น˜ ๋“ฑ์ˆ˜๋Š” k+1์ด ๋œ๋‹ค. ์ด๋ ‡๊ฒŒ ๋“ฑ์ˆ˜๋ฅผ ๊ฒฐ์ •ํ•˜๋ฉด ๊ฐ™์€ ๋ฉ์น˜ ๋“ฑ์ˆ˜๋ฅผ ๊ฐ€์ง„ ์‚ฌ๋žŒ์€ ์—ฌ๋Ÿฌ ๋ช…๋„ ๊ฐ€๋Šฅํ•˜๋‹ค. ์•„๋ž˜๋Š” 5๋ช…์œผ๋กœ ์ด๋ฃจ์–ด์ง„ ์ง‘๋‹จ์—์„œ ๊ฐ ์‚ฌ๋žŒ์˜ ๋ฉ์น˜์™€ ๊ทธ ๋“ฑ์ˆ˜๊ฐ€ ํ‘œ์‹œ๋œ ํ‘œ์ด๋‹ค. + +| ์ด๋ฆ„ | (๋ชธ๋ฌด๊ฒŒ, ํ‚ค) | ๋ฉ์น˜ ๋“ฑ์ˆ˜ | +| --- | --- | --- | +| A | (55, 185) | 2 | +| B | (58, 183) | 2 | +| C | (88, 186) | 1 | +| D | (60, 175) | 2 | +| E | (46, 155) | 5 | + +์œ„ ํ‘œ์—์„œ C๋ณด๋‹ค ๋” ํฐ ๋ฉ์น˜์˜ ์‚ฌ๋žŒ์ด ์—†์œผ๋ฏ€๋กœ C๋Š” 1๋“ฑ์ด ๋œ๋‹ค. ๊ทธ๋ฆฌ๊ณ  A, B, D ๊ฐ๊ฐ์˜ ๋ฉ์น˜๋ณด๋‹ค ํฐ ์‚ฌ๋žŒ์€ C๋ฟ์ด๋ฏ€๋กœ ์ด๋“ค์€ ๋ชจ๋‘ 2๋“ฑ์ด ๋œ๋‹ค. ๊ทธ๋ฆฌ๊ณ  E๋ณด๋‹ค ํฐ ๋ฉ์น˜๋Š” A, B, C, D ์ด๋ ‡๊ฒŒ 4๋ช…์ด๋ฏ€๋กœ E์˜ ๋ฉ์น˜๋Š” 5๋“ฑ์ด ๋œ๋‹ค. ์œ„ ๊ฒฝ์šฐ์— 3๋“ฑ๊ณผ 4๋“ฑ์€ ์กด์žฌํ•˜์ง€ ์•Š๋Š”๋‹ค. ์—ฌ๋Ÿฌ๋ถ„์€ ํ•™์ƒ N๋ช…์˜ ๋ชธ๋ฌด๊ฒŒ์™€ ํ‚ค๊ฐ€ ๋‹ด๊ธด ์ž…๋ ฅ์„ ์ฝ์–ด์„œ ๊ฐ ์‚ฌ๋žŒ์˜ ๋ฉ์น˜ ๋“ฑ์ˆ˜๋ฅผ ๊ณ„์‚ฐํ•˜์—ฌ ์ถœ๋ ฅํ•ด์•ผ ํ•œ๋‹ค. + +# ์ž…๋ ฅ + +์ฒซ ์ค„์—๋Š” ์ „์ฒด ์‚ฌ๋žŒ์˜ ์ˆ˜ N์ด ์ฃผ์–ด์ง„๋‹ค. ๊ทธ๋ฆฌ๊ณ  ์ด์–ด์ง€๋Š” N๊ฐœ์˜ ์ค„์—๋Š” ๊ฐ ์‚ฌ๋žŒ์˜ ๋ชธ๋ฌด๊ฒŒ์™€ ํ‚ค๋ฅผ ๋‚˜ํƒ€๋‚ด๋Š” ์–‘์˜ ์ •์ˆ˜ x์™€ y๊ฐ€ ํ•˜๋‚˜์˜ ๊ณต๋ฐฑ์„ ๋‘๊ณ  ๊ฐ๊ฐ ๋‚˜ํƒ€๋‚œ๋‹ค. 
+ +# ์ถœ๋ ฅ + +์—ฌ๋Ÿฌ๋ถ„์€ ์ž…๋ ฅ์— ๋‚˜์—ด๋œ ์‚ฌ๋žŒ์˜ ๋ฉ์น˜ ๋“ฑ์ˆ˜๋ฅผ ๊ตฌํ•ด์„œ ๊ทธ ์ˆœ์„œ๋Œ€๋กœ ์ฒซ ์ค„์— ์ถœ๋ ฅํ•ด์•ผ ํ•œ๋‹ค. ๋‹จ, ๊ฐ ๋ฉ์น˜ ๋“ฑ์ˆ˜๋Š” ๊ณต๋ฐฑ๋ฌธ์ž๋กœ ๋ถ„๋ฆฌ๋˜์–ด์•ผ ํ•œ๋‹ค. + +# ํ’€์ด + +๋ชจ๋“  ์‚ฌ๋žŒ๋“ค์„ ๋น„๊ตํ•˜๋ฉด์„œ ํ‚ค์™€ ๋ชธ๋ฌด๊ฒŒ๊ฐ€ ๋‘˜ ๋‹ค ์ž‘์„ ๊ฒฝ์šฐ ํ•ด๋‹น ์‚ฌ๋žŒ์˜ ์ธ๋ฑ์Šค์— ์นด์šดํŠธ๋ฅผ ํ•˜๋‚˜ ์˜ฌ๋ฆฐ๋‹ค (๋“ฑ์ˆ˜๋ฅผ ํ•˜๋‚˜ ๋‚ด๋ฆผ). ๋งˆ์ง€๋ง‰์— ์นด์šดํŠธํ•œ ๋ฐฐ์—ด์„ ์ถœ๋ ฅํ•œ๋‹ค. ๋“ฑ์ˆ˜๋Š” 1๋“ฑ๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๋“ฑ์ˆ˜(์นด์šดํŠธ)๋ฅผ ์ €์žฅํ•˜๋Š” ๋ฐฐ์—ด์˜ ๊ฐ’์€ 1๋กœ ์ •์˜ํ•˜๊ณ  ์‹œ์ž‘ํ•œ๋‹ค. + +## ์ฝ”๋“œ + +```python +n = int(input()) +l = [] +cnt = [1] * n +for _ in range(n): + l.append(tuple(map(int, input().split()))) + +for i in range(n): + for j in range(i+1, n): + p1, p2 = l[i], l[j] + if p1[0] > p2[0] and p1[1] > p2[1]: + cnt[j] += 1 + elif p1[0] < p2[0] and p1[1] < p2[1]: + cnt[i] += 1 +print(' '.join(map(str, cnt))) +``` \ No newline at end of file diff --git a/_posts/2022-10-04-boj_1269.markdown b/_posts/2022-10-04-boj_1269.markdown new file mode 100644 index 00000000000..9c064dba57a --- /dev/null +++ b/_posts/2022-10-04-boj_1269.markdown @@ -0,0 +1,42 @@ +--- +title: "[BOJ] ๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ - 1269 (S4)" +author: kwon +date: 2022-10-4T14:00:00 +0900 +categories: [boj, silver] +tags: [data structure, set / map by hashing, set / map by tree] +math: true +mermaid: false +--- + +22/10/4 + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 256 MB | + +# ๋ฌธ์ œ + +์ž์—ฐ์ˆ˜๋ฅผ ์›์†Œ๋กœ ๊ฐ–๋Š” ๊ณต์ง‘ํ•ฉ์ด ์•„๋‹Œ ๋‘ ์ง‘ํ•ฉ A์™€ B๊ฐ€ ์žˆ๋‹ค. ์ด๋•Œ, ๋‘ ์ง‘ํ•ฉ์˜ ๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ์˜ ์›์†Œ์˜ ๊ฐœ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. ๋‘ ์ง‘ํ•ฉ A์™€ B๊ฐ€ ์žˆ์„ ๋•Œ, (A-B)์™€ (B-A)์˜ ํ•ฉ์ง‘ํ•ฉ์„ A์™€ B์˜ ๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ์ด๋ผ๊ณ  ํ•œ๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด, A = { 1, 2, 4 } ์ด๊ณ , B = { 2, 3, 4, 5, 6 } ๋ผ๊ณ  ํ•  ๋•Œ,ย  A-B = { 1 } ์ด๊ณ , B-A = { 3, 5, 6 } ์ด๋ฏ€๋กœ, ๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ์˜ ์›์†Œ์˜ ๊ฐœ์ˆ˜๋Š” 1 + 3 = 4๊ฐœ์ด๋‹ค. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์ง‘ํ•ฉ A์˜ ์›์†Œ์˜ ๊ฐœ์ˆ˜์™€ ์ง‘ํ•ฉ B์˜ ์›์†Œ์˜ ๊ฐœ์ˆ˜๊ฐ€ ๋นˆ ์นธ์„ ์‚ฌ์ด์— ๋‘๊ณ  ์ฃผ์–ด์ง„๋‹ค. ๋‘˜์งธ ์ค„์—๋Š” ์ง‘ํ•ฉ A์˜ ๋ชจ๋“  ์›์†Œ๊ฐ€, ์…‹์งธ ์ค„์—๋Š” ์ง‘ํ•ฉ B์˜ ๋ชจ๋“  ์›์†Œ๊ฐ€ ๋นˆ ์นธ์„ ์‚ฌ์ด์— ๋‘๊ณ  ๊ฐ๊ฐ ์ฃผ์–ด์ง„๋‹ค. ๊ฐ ์ง‘ํ•ฉ์˜ ์›์†Œ์˜ ๊ฐœ์ˆ˜๋Š” 200,000์„ ๋„˜์ง€ ์•Š์œผ๋ฉฐ, ๋ชจ๋“  ์›์†Œ์˜ ๊ฐ’์€ 100,000,000์„ ๋„˜์ง€ ์•Š๋Š”๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— ๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ์˜ ์›์†Œ์˜ ๊ฐœ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ์˜ ์ •์˜์— ๋”ฐ๋ผ ๊ทธ๋Œ€๋กœ ์ฝ”๋“œ๋กœ ์ž‘์„ฑํ•˜๋ฉด ๋˜๋Š” ๋ฌธ์ œ๋‹ค. ๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ์˜ ๊ฐ ์ฐจ์ง‘ํ•ฉ (A-B)์™€ (B-A)๋Š” ๊ต์ง‘ํ•ฉ์ด ๋ฌด์กฐ๊ฑด ๊ณต์ง‘ํ•ฉ์ด๋ฏ€๋กœ ๊ฐ ์ฐจ์ง‘ํ•ฉ์˜ ์›์†Œ ๊ฐœ์ˆ˜๋ฅผ ๋”ํ•˜๋Š” ๊ฒƒ์œผ๋กœ ๋Œ€์นญ ์ฐจ์ง‘ํ•ฉ์˜ ์›์†Œ ๊ฐœ์ˆ˜๋ฅผ ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +## ์ฝ”๋“œ + +```python +input() +a = set(map(int, input().split())) +b = set(map(int, input().split())) +print(len(a - b) + len(b - a)) +``` \ No newline at end of file diff --git a/_posts/2022-10-11-boj_10989.markdown b/_posts/2022-10-11-boj_10989.markdown new file mode 100644 index 00000000000..25c8643bda4 --- /dev/null +++ b/_posts/2022-10-11-boj_10989.markdown @@ -0,0 +1,49 @@ +--- +title: "[BOJ] ์ˆ˜ ์ •๋ ฌํ•˜๊ธฐ 3 - 10989 (B1)" +author: kwon +date: 2022-10-11T23:00:00 +0900 +categories: [boj, bronze] +tags: [sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 512 MB | + +# ๋ฌธ์ œ + +N๊ฐœ์˜ ์ˆ˜๊ฐ€ ์ฃผ์–ด์กŒ์„ ๋•Œ, ์ด๋ฅผย ์˜ค๋ฆ„์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. 
+ +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์ˆ˜์˜ ๊ฐœ์ˆ˜ N(1 โ‰คย N โ‰ค 10,000,000)์ด ์ฃผ์–ด์ง„๋‹ค. ๋‘˜์งธ ์ค„๋ถ€ํ„ฐ N๊ฐœ์˜ ์ค„์—๋Š” ์ˆ˜๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ์ด ์ˆ˜๋Š”ย 10,000๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™์€ ์ž์—ฐ์ˆ˜์ด๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„๋ถ€ํ„ฐ N๊ฐœ์˜ ์ค„์— ์˜ค๋ฆ„์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌํ•œ ๊ฒฐ๊ณผ๋ฅผ ํ•œ ์ค„์— ํ•˜๋‚˜์”ฉ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +์ค‘๋ณต๋œ ์ˆ˜๋ฅผ ์—ฌ๋Ÿฌ๋ฒˆ ์ •๋ ฌํ•˜๋Š” ๊ฒƒ์„ ํ”ผํ•˜๊ธฐ ์œ„ํ•ด ๋”•์…”๋„ˆ๋ฆฌ์— ์ˆซ์ž : ๊ฐœ์ˆ˜ ํ˜•ํƒœ๋กœ ์ €์žฅํ•œ ๋’ค์— key๋ฅผ ์ •๋ ฌํ•˜์—ฌ ์ˆ˜์˜ ๊ฐœ์ˆ˜๋งŒํผ ๊ทธ ์ˆ˜(key)๋ฅผ ์ถœ๋ ฅํ•ด์ค€๋‹ค. + +## ์ฝ”๋“œ + +```python +import sys +from collections import defaultdict +input = sys.stdin.readline +print = sys.stdout.write + +n = int(input()) +nums = [] +d = defaultdict(int) + +for _ in range(n): + d[int(input())] += 1 + +for num in sorted(d): + for _ in range(d[num]): + print(f'{str(num)}\n') +``` \ No newline at end of file diff --git a/_posts/2022-10-13-boj_11650-11651.markdown b/_posts/2022-10-13-boj_11650-11651.markdown new file mode 100644 index 00000000000..3128389e92a --- /dev/null +++ b/_posts/2022-10-13-boj_11650-11651.markdown @@ -0,0 +1,83 @@ +--- +title: "[BOJ] ์ขŒํ‘œ ์ •๋ ฌ 1, 2 - 11650, 11651 (S5)" +author: kwon +date: 2022-10-13T23:00:00 +0900 +categories: [boj, silver] +tags: [sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 256 MB | + +# ๋ฌธ์ œ + +์ขŒํ‘œ ์ •๋ ฌํ•˜๊ธฐ 1: 2์ฐจ์› ํ‰๋ฉด ์œ„์˜ ์  N๊ฐœ๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ์ขŒํ‘œ๋ฅผ x์ขŒํ‘œ๊ฐ€ ์ฆ๊ฐ€ํ•˜๋Š” ์ˆœ์œผ๋กœ, x์ขŒํ‘œ๊ฐ€ ๊ฐ™์œผ๋ฉด y์ขŒํ‘œ๊ฐ€ ์ฆ๊ฐ€ํ•˜๋Š” ์ˆœ์„œ๋กœ ์ •๋ ฌํ•œ ๋‹ค์Œ ์ถœ๋ ฅํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +์ขŒํ‘œ ์ •๋ ฌํ•˜๊ธฐ 2: 2์ฐจ์› ํ‰๋ฉด ์œ„์˜ ์  N๊ฐœ๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ์ขŒํ‘œ๋ฅผ y์ขŒํ‘œ๊ฐ€ ์ฆ๊ฐ€ํ•˜๋Š” ์ˆœ์œผ๋กœ, y์ขŒํ‘œ๊ฐ€ ๊ฐ™์œผ๋ฉด x์ขŒํ‘œ๊ฐ€ ์ฆ๊ฐ€ํ•˜๋Š” ์ˆœ์„œ๋กœ ์ •๋ ฌํ•œ ๋‹ค์Œ ์ถœ๋ ฅํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์ ์˜ ๊ฐœ์ˆ˜ N (1 โ‰ค N โ‰ค 100,000)์ด ์ฃผ์–ด์ง„๋‹ค. ๋‘˜์งธ ์ค„๋ถ€ํ„ฐ N๊ฐœ์˜ ์ค„์—๋Š” i๋ฒˆ์ ์˜ ์œ„์น˜ xi์™€ yi๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. (-100,000 โ‰ค xi, yiย โ‰ค 100,000) ์ขŒํ‘œ๋Š” ํ•ญ์ƒ ์ •์ˆ˜์ด๊ณ , ์œ„์น˜๊ฐ€ ๊ฐ™์€ ๋‘ ์ ์€ ์—†๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„๋ถ€ํ„ฐ N๊ฐœ์˜ ์ค„์— ์ ์„ ์ •๋ ฌํ•œย ๊ฒฐ๊ณผ๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +1, 2 ๋ชจ๋‘ ์ฃผ์–ด์ง„ ์ขŒํ‘œ๋ฅผ ์ •๋ ฌํ•˜๋Š” ๋ฌธ์ œ์ด๋‹ค. ๋‹ค๋ฅธ ์ ์€ 1์€ x์ขŒํ‘œ๋ฅผ ์šฐ์„ ์œผ๋กœ, 2๋Š” y์ขŒํ‘œ๋ฅผ ์šฐ์„ ์œผ๋กœ ์ •๋ ฌํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. ์ฃผ์–ด์ง„ ์ž…๋ ฅ์˜ x, y ๊ฐ’์˜ ๋ฒ”์œ„๊ฐ€ (-100,000 โ‰ค xi, yiย โ‰ค 100,000)๋กœ ์ œํ•œ์ ์ด๊ณ  ์ •์ˆ˜์ด๊ธฐ ๋•Œ๋ฌธ์— Counting Sort๋ฅผ ์‚ฌ์šฉํ•˜๊ธฐ์— ์ ํ•ฉํ•˜๋‹ค. + +์ˆ˜์˜ ๊ฐœ์ˆ˜๋ฅผ ์„ธ์–ด์ค„ ๋ฐฐ์—ด์˜ ํฌ๊ธฐ๋ฅผ x, y์˜ ๋ฒ”์œ„์— ๋งž๊ฒŒ 200,002๋กœ ์žก์•„์ค€๋‹ค. 200,001๋กœ ํฌ๊ธฐ๋ฅผ ์žก์ง€ ์•Š์€ ์ด์œ ๋Š” ๊น”๋”ํ•˜๊ฒŒ 100,000์„ ๋”ํ•ด์„œ ์ธ๋ฑ์Šค๊ฐ€ ํ—ท๊ฐˆ๋ฆฌ์ง€ ์•Š๊ฒŒ ์ฝ”๋“œ๋ฅผ ์ž‘์„ฑํ•  ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +๋ฉ”๋ชจ๋ฆฌ๋ฅผ ์ ๊ฒŒ ์จ์„œ ๊ณต๊ฐ„๋ณต์žก๋„๋ฅผ ์ค„์ด๋Š” ๊ฒƒ๋„ ์ค‘์š”ํ•˜์ง€๋งŒ, ์ด ๋ฌธ์ œ์™€ ๊ฐ™์ด ์ถฉ๋ถ„ํ•œ ๋ฉ”๋ชจ๋ฆฌ๊ฐ€ ์ฃผ์–ด์ง„ ์ƒํ™ฉ์—์„œ๋Š” ๋ฐฐ์—ด ํ•œ ์ž๋ฆฌ์˜ ๋ฉ”๋ชจ๋ฆฌ๋ฅผ ์ค„์ด๋Š” ๊ฒƒ๋ณด๋‹ค ํ—ท๊ฐˆ๋ฆฌ๋Š” ๋ถ€๋ถ„์„ ์—†์• ๋ฒ„๋ฆฌ๊ณ  ๊ฐ€๋…์„ฑ์„ ๋†’์—ฌ ๊ฑฐ๊ธฐ์„œ ๋ฐœ์ƒํ•  ์ˆ˜ ์žˆ๋Š” ์‹ค์ˆ˜๋ฅผ ์ค„์ด๋Š” ๊ฒƒ์ด ์ด๋“์ด๋ผ๊ณ  ํŒ๋‹จํ–ˆ๋‹ค. + +ํ•˜๋‚˜์˜ ์ˆ˜์—ด์„ ์ •๋ ฌํ•˜๋Š” ๊ฒฝ์šฐ์—๋Š” ์ฃผ์–ด์ง„ ์ˆ˜์˜ ๊ฐœ์ˆ˜๋งŒ ์ €์žฅํ•˜๋ฉด ๋˜๊ฒ ์ง€๋งŒ ์ขŒํ‘œ๋ฅผ ์ •๋ ฌํ•˜๋Š” ๊ฒฝ์šฐ์—๋Š” ์šฐ์„ ์ˆœ์œ„๊ฐ€ ์•„๋‹Œ ์ˆ˜๋“ค(1๋ฒˆ ๋ฌธ์ œ์˜ ๊ฒฝ์šฐ y์ขŒํ‘œ)๋„ ์‹ ๊ฒฝ์จ์•ผํ•˜๊ธฐ ๋•Œ๋ฌธ์— ํ•ด๋‹นํ•˜๋Š” x์ขŒํ‘œ์— y์ขŒํ‘œ์˜ ๋ฐฐ์—ด์„ ์ €์žฅํ•ด์ฃผ์—ˆ๋‹ค. 
+ +์ด๋ ‡๊ฒŒ ๋ฐฐ์—ด์„ ๋งŒ๋“ค๊ฒŒ ๋˜๋ฉด x์ขŒํ‘œ์˜ ํฌ๊ธฐ์— ๋”ฐ๋ผ y์ขŒํ‘œ์˜ ๋ฐฐ์—ด๋“ค์ด ์ €์žฅ๋˜๋ฏ€๋กœ y์ขŒํ‘œ๊ฐ€ ์ €์žฅ๋œ ๊ฒฝ์šฐ์—๋งŒ ์ •๋ ฌํ•˜์—ฌ ์ขŒํ‘œ๋ฅผ ์ถœ๋ ฅํ•˜๋ฉด ๋œ๋‹ค. + +## ์ฝ”๋“œ + +```python +import sys +input = sys.stdin.readline +print = sys.stdout.write + +n = int(input()) +cnt_list = [[] for i in range(200002)] +for _ in range(n): + x, y = map(int, input().split()) + cnt_list[x + 100000].append(y) + +for x, ys in enumerate(cnt_list): + if ys: + if len(ys) == 1: + print(f"{x - 100000} {ys[0]}\n") + else: + for y in sorted(ys): + print(f"{x - 100000} {y}\n") +``` + +2๋ฒˆ ๋ฌธ์ œ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ์ง„ํ–‰ํ•˜๋˜ y๋ฅผ ๊ธฐ์ค€์œผ๋กœ ๋ฐฐ์—ด์„ ๋งŒ๋“ค์–ด x์ขŒํ‘œ๋ฅผ appendํ•ด์ค€๋‹ค. + +```python +import sys +input = sys.stdin.readline +print = sys.stdout.write + +n = int(input()) +cnt_list = [[] for i in range(200002)] +for _ in range(n): + x, y = map(int, input().split()) + cnt_list[y + 100000].append(x) + +for y, xs in enumerate(cnt_list): + if xs: + if len(xs) == 1: + print(f"{xs[0]} {y - 100000}\n") + else: + for x in sorted(xs): + print(f"{x} {y - 100000}\n") +``` \ No newline at end of file diff --git a/_posts/2022-10-13-boj_1181.markdown b/_posts/2022-10-13-boj_1181.markdown new file mode 100644 index 00000000000..3c963dc4b00 --- /dev/null +++ b/_posts/2022-10-13-boj_1181.markdown @@ -0,0 +1,56 @@ +--- +title: "[BOJ] ๋‹จ์–ด ์ •๋ ฌ - 1181 (S5)" +author: kwon +date: 2022-10-13T23:00:00 +0900 +categories: [boj, silver] +tags: [string, sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 256 MB | + +# ๋ฌธ์ œ + +์•ŒํŒŒ๋ฒณ ์†Œ๋ฌธ์ž๋กœ ์ด๋ฃจ์–ด์ง„ N๊ฐœ์˜ ๋‹จ์–ด๊ฐ€ ๋“ค์–ด์˜ค๋ฉด ์•„๋ž˜์™€ ๊ฐ™์€ ์กฐ๊ฑด์— ๋”ฐ๋ผ ์ •๋ ฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +1. ๊ธธ์ด๊ฐ€ ์งง์€ ๊ฒƒ๋ถ€ํ„ฐ +2. ๊ธธ์ด๊ฐ€ ๊ฐ™์œผ๋ฉด ์‚ฌ์ „ ์ˆœ์œผ๋กœ + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ๋‹จ์–ด์˜ ๊ฐœ์ˆ˜ N์ด ์ฃผ์–ด์ง„๋‹ค. (1 โ‰ค N โ‰ค 20,000) ๋‘˜์งธ ์ค„๋ถ€ํ„ฐ N๊ฐœ์˜ ์ค„์— ๊ฑธ์ณ ์•ŒํŒŒ๋ฒณ ์†Œ๋ฌธ์ž๋กœ ์ด๋ฃจ์–ด์ง„ ๋‹จ์–ด๊ฐ€ ํ•œ ์ค„์— ํ•˜๋‚˜์”ฉ ์ฃผ์–ด์ง„๋‹ค. ์ฃผ์–ด์ง€๋Š” ๋ฌธ์ž์—ด์˜ ๊ธธ์ด๋Š” 50์„ ๋„˜์ง€ ์•Š๋Š”๋‹ค. + +# ์ถœ๋ ฅ + +์กฐ๊ฑด์— ๋”ฐ๋ผ ์ •๋ ฌํ•˜์—ฌ ๋‹จ์–ด๋“ค์„ ์ถœ๋ ฅํ•œ๋‹ค. ๋‹จ, ๊ฐ™์€ ๋‹จ์–ด๊ฐ€ ์—ฌ๋Ÿฌ ๋ฒˆ ์ž…๋ ฅ๋œ ๊ฒฝ์šฐ์—๋Š” ํ•œ ๋ฒˆ์”ฉ๋งŒ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +์ž…๋ ฅ๋œ ๋‹จ์–ด์˜ ๊ธธ์ด๋ฅผ index๋กœ ํ•ด๋‹น ๊ธธ์ด์˜ ๋‹จ์–ด ๋ฌถ์Œ์„ ์›์†Œ๋กœ ํ•˜๋Š” ๋ฆฌ์ŠคํŠธ๋ฅผ ๋งŒ๋“ค์–ด ์ค€๋‹ค. ์ด ๋•Œ ๋‹จ์–ด๋Š” ์ค‘๋ณต์„ ํ—ˆ์šฉํ•˜์ง€ ์•Š๊ธฐ ๋•Œ๋ฌธ์— ๋ฌถ์Œ์„ ์ง‘ํ•ฉ์œผ๋กœ ๋งŒ๋“ค์–ด ์ค€๋‹ค. + +์ดํ›„ ๋ฆฌ์ŠคํŠธ๋ฅผ ๋ฐ˜๋ณตํ•˜๋ฉด์„œ ๋‹จ์–ด๊ฐ€ ์กด์žฌํ•˜๋Š” ๊ฒฝ์šฐ ๋‹จ์–ด๋“ค์„ ์ถœ๋ ฅํ•ด ์ค€๋‹ค. ๋‹จ์–ด๊ฐ€ ์—ฌ๋Ÿฌ ๊ฐœ๋ฉด list๋กœ ๋ฐ”๊ฟ”(set์€ ์ˆœ์„œ๊ฐ€ ์—†๋‹ค) ์ •๋ ฌํ•œ ๊ฒƒ์„ ์ฐจ๋ก€๋Œ€๋กœ ์ถœ๋ ฅํ•ด ์ค€๋‹ค. 
+ +## ์ฝ”๋“œ + +```python +import sys +input = sys.stdin.readline +print = sys.stdout.write + +n = int(input()) +len_cnt = [set() for i in range(51)] +for _ in range(n): + word = input().strip() + len_cnt[len(word)].add(word) + +for words in len_cnt: + if words: + if len(words) == 1: + print(f"{list(words)[0]}\n") + else: + for w in sorted(list(words)): + print(f"{w}\n") +``` \ No newline at end of file diff --git a/_posts/2022-10-13-boj_1427.markdown b/_posts/2022-10-13-boj_1427.markdown new file mode 100644 index 00000000000..c23d387ec30 --- /dev/null +++ b/_posts/2022-10-13-boj_1427.markdown @@ -0,0 +1,49 @@ +--- +title: "[BOJ] ์†ŒํŠธ ์ธ์‚ฌ์ด๋“œ - 1427 (S5)" +author: kwon +date: 2022-10-13T14:00:00 +0900 +categories: [boj, silver] +tags: [string, sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 128 MB | + +# ๋ฌธ์ œ + +๋ฐฐ์—ด์„ ์ •๋ ฌํ•˜๋Š” ๊ฒƒ์€ ์‰ฝ๋‹ค. ์ˆ˜๊ฐ€ ์ฃผ์–ด์ง€๋ฉด, ๊ทธ ์ˆ˜์˜ ๊ฐ ์ž๋ฆฌ์ˆ˜๋ฅผ ๋‚ด๋ฆผ์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌํ•ด๋ณด์ž. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์ •๋ ฌํ•˜๋ ค๊ณ  ํ•˜๋Š” ์ˆ˜ N์ด ์ฃผ์–ด์ง„๋‹ค. N์€ 1,000,000,000๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™์€ ์ž์—ฐ์ˆ˜์ด๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— ์ž๋ฆฌ์ˆ˜๋ฅผ ๋‚ด๋ฆผ์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌํ•œ ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +์ฃผ์–ด์ง„ ์ˆ˜๋ฅผ ๋‚ด๋ฆผ์ฐจ์ˆœ ์ •๋ ฌํ•˜๋Š” ๊ฐ„๋‹จํ•œ ๋ฌธ์ œ์ด๋‹ค. ์ •์ˆ˜๋กœ ์ˆ˜์—ด์ด ์ด๋ฃจ์–ด์ ธ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— counting sort๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํ’€์—ˆ๋‹ค. + +## ์ฝ”๋“œ + +```python +import sys +input = sys.stdin.readline + +num = input().strip() +cnt_dict = {str(n):0 for n in range(9, -1, -1)} +for i in num: + cnt_dict[i] += 1 +ans = '' +for i in cnt_dict: + for _ in range(cnt_dict[i]): + ans += i + +print(ans) +``` + +9~0๊นŒ์ง€์˜ ์ˆ˜๋ฅผ key๋กœ ๊ฐ€์ง€๊ณ  value๊ฐ€ 0์ธ dictionary๋ฅผ ๋งŒ๋“ค๊ณ  ์ˆ˜๊ฐ€ ์–ผ๋งˆ๋‚˜ ๋‚˜์™”๋Š”์ง€ count ํ•œ๋‹ค. ์ดํ›„ dictionary์˜ key ์ˆœ์„œ๋Œ€๋กœ(๋‚ด๋ฆผ์ฐจ์ˆœ) count ๋œ ์ˆ˜๋งŒํผ ๋ฌธ์ž๋ฅผ ๋ถ™์—ฌ๊ฐ€๋ฉฐ ๋‹ต์„ ์™„์„ฑํ•œ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-11-15-boj_1004.markdown b/_posts/2022-11-15-boj_1004.markdown new file mode 100644 index 00000000000..c700437db3c --- /dev/null +++ b/_posts/2022-11-15-boj_1004.markdown @@ -0,0 +1,66 @@ +--- +title: "[BOJ] ์–ด๋ฆฐ ์™•์ž - 1004 (S3)" +author: kwon +date: 2022-11-15T23:00:00 +0900 +categories: [boj, silver] +tags: [math, geometry] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 128 MB | + +# ๋ฌธ์ œ + +์–ด๋ฆฐ ์™•์ž๋Š” ์†Œํ˜น์„ฑ B-664์—์„œ ์ž์‹ ์ด ์‚ฌ๋ž‘ํ•˜๋Š” ํ•œ ์†ก์ด ์žฅ๋ฏธ๋ฅผ ์œ„ํ•ด ์‚ด์•„๊ฐ„๋‹ค. ์–ด๋А ๋‚  ์žฅ๋ฏธ๊ฐ€ ์œ„ํ—˜์— ๋น ์ง€๊ฒŒ ๋œ ๊ฒƒ์„ ์•Œ๊ฒŒ ๋œ ์–ด๋ฆฐ ์™•์ž๋Š”, ์žฅ๋ฏธ๋ฅผ ๊ตฌํ•˜๊ธฐ ์œ„ํ•ด ์€ํ•˜์ˆ˜๋ฅผ ๋”ฐ๋ผ ๊ธด ์—ฌํ–‰์„ ํ•˜๊ธฐ ์‹œ์ž‘ํ–ˆ๋‹ค. ํ•˜์ง€๋งŒ ์–ด๋ฆฐ ์™•์ž์˜ ์šฐ์ฃผ์„ ์€ ๊ทธ๋ ‡๊ฒŒ ์ข‹์ง€ ์•Š์•„์„œ ํ–‰์„ฑ๊ณ„ ๊ฐ„์˜ ์ด๋™์„ ์ตœ๋Œ€ํ•œ ํ”ผํ•ด์„œ ์—ฌํ–‰ํ•ด์•ผ ํ•œ๋‹ค. ์•„๋ž˜์˜ ๊ทธ๋ฆผ์€ ์–ด๋ฆฐ ์™•์ž๊ฐ€ ํŽผ์ณ๋ณธ ์€ํ•˜์ˆ˜ ์ง€๋„์˜ ์ผ๋ถ€์ด๋‹ค. + +!https://onlinejudgeimages.s3-ap-northeast-1.amazonaws.com/upload/201003/dfcmhrjj_113gw6bcng2_b.gif + +๋นจ๊ฐ„ ์‹ค์„ ์€ ์–ด๋ฆฐ ์™•์ž๊ฐ€ ์ถœ๋ฐœ์ ์—์„œ ๋„์ฐฉ์ ๊นŒ์ง€ ๋„๋‹ฌํ•˜๋Š”๋ฐ ์žˆ์–ด์„œ ํ•„์š”ํ•œ ํ–‰์„ฑ๊ณ„ ์ง„์ž…/์ดํƒˆ ํšŸ์ˆ˜๋ฅผ ์ตœ์†Œํ™”ํ•˜๋Š” ๊ฒฝ๋กœ์ด๋ฉฐ, ์›์€ ํ–‰์„ฑ๊ณ„์˜ ๊ฒฝ๊ณ„๋ฅผ ์˜๋ฏธํ•œ๋‹ค. ์ด๋Ÿฌํ•œ ๊ฒฝ๋กœ๋Š” ์—ฌ๋Ÿฌ ๊ฐœ ์กด์žฌํ•  ์ˆ˜ ์žˆ์ง€๋งŒ ์ ์–ด๋„ 3๋ฒˆ์˜ ํ–‰์„ฑ๊ณ„ ์ง„์ž…/์ดํƒˆ์ด ํ•„์š”ํ•˜๋‹ค๋Š” ๊ฒƒ์„ ์•Œ ์ˆ˜ ์žˆ๋‹ค. + +์œ„์™€ ๊ฐ™์€ ์€ํ•˜์ˆ˜ ์ง€๋„, ์ถœ๋ฐœ์ , ๋„์ฐฉ์ ์ด ์ฃผ์–ด์กŒ์„ ๋•Œ ์–ด๋ฆฐ ์™•์ž์—๊ฒŒ ํ•„์š”ํ•œ ์ตœ์†Œ์˜ ํ–‰์„ฑ๊ณ„ ์ง„์ž…/์ดํƒˆ ํšŸ์ˆ˜๋ฅผ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•ด ๋ณด์ž. 
ํ–‰์„ฑ๊ณ„์˜ ๊ฒฝ๊ณ„๊ฐ€ ๋งž๋‹ฟ๊ฑฐ๋‚˜ ์„œ๋กœ ๊ต์ฐจํ•˜๋Š” ๊ฒฝ์šฐ๋Š” ์—†๋‹ค. ๋˜ํ•œ, ์ถœ๋ฐœ์ ์ด๋‚˜ ๋„์ฐฉ์ ์ด ํ–‰์„ฑ๊ณ„ ๊ฒฝ๊ณ„์— ๊ฑธ์ณ์ง„ ๊ฒฝ์šฐ ์—ญ์‹œ ์ž…๋ ฅ์œผ๋กœ ์ฃผ์–ด์ง€์ง€ ์•Š๋Š”๋‹ค. + +# ์ž…๋ ฅ + +์ž…๋ ฅ์˜ ์ฒซ ์ค„์—๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๊ทธ ๋‹ค์Œ ์ค„๋ถ€ํ„ฐ ๊ฐ๊ฐ์˜ ํ…Œ์ŠคํŠธ์ผ€์ด์Šค์— ๋Œ€ํ•ด ์ฒซ์งธ ์ค„์— ์ถœ๋ฐœ์  $(x_1, y_1)$๊ณผ ๋„์ฐฉ์  $(x_2, y_2)$์ด ์ฃผ์–ด์ง„๋‹ค. ๋‘ ๋ฒˆ์งธ ์ค„์—๋Š” ํ–‰์„ฑ๊ณ„์˜ ๊ฐœ์ˆ˜ n์ด ์ฃผ์–ด์ง€๋ฉฐ, ์„ธ ๋ฒˆ์งธ ์ค„๋ถ€ํ„ฐ n์ค„์— ๊ฑธ์ณ ํ–‰์„ฑ๊ณ„์˜ ์ค‘์ ๊ณผ ๋ฐ˜์ง€๋ฆ„ $(c_x, c_y, r)$์ด ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด ์–ด๋ฆฐ ์™•์ž๊ฐ€ ๊ฑฐ์ณ์•ผ ํ•  ์ตœ์†Œ์˜ ํ–‰์„ฑ๊ณ„ ์ง„์ž…/์ดํƒˆ ํšŸ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ์ œํ•œ + +- $1000 โ‰ค x_1, y_1, x_2, y_2, c_y, c_xย โ‰ค 1000$ +- $1 โ‰ค r โ‰ค 1000$ +- $1 โ‰ค n โ‰ค 50$ +- ์ขŒํ‘œ์™€ ๋ฐ˜์ง€๋ฆ„์€ ๋ชจ๋‘ ์ •์ˆ˜ + +# ํ’€์ด + +ํ–‰์„ฑ๊ณ„๋ฅผ ๊ผญ ํ†ต๊ณผํ•ด์•ผ ํ•˜๋Š” ์กฐ๊ฑด์„ ๋จผ์ € ์ฐพ๋Š”. ์ถœ๋ฐœ์ ์ด๋‚˜ ๋„์ฐฉ์ ์ด ํ–‰์„ฑ๊ณ„ ์•ˆ์— ์žˆ์„ ๊ฒฝ์šฐ ๋ฌด์กฐ๊ฑด ๊ทธ ํ•ญ์„ฑ๊ณ„๋ฅผ ํ†ต๊ณผํ•ด์•ผ ํ•œ๋‹ค. ์ถœ๋ฐœ์ ์ด๋‚˜ ๋„์ฐฉ์ ์„ ํฌํ•จํ•˜์ง€ ์•Š๊ณ  ์žˆ๋Š” ํ–‰์„ฑ๊ณ„๋Š” ์–ด๋–ป๊ฒŒ๋“  ํ”ผํ•ด๊ฐˆ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +ํ•œ ์ ์ด ์› ์•ˆ์— ํฌํ•จ๋˜์–ด์žˆ๋Š”์ง€ ํ™•์ธํ•˜๋ ค๋ฉด, ์›์˜ ๋ฐ˜์ง€๋ฆ„๊ณผ ์›์˜ ์ค‘์‹ฌ๊ณผ ๊ทธ ์  ์‚ฌ์ด์˜ ๊ฑฐ๋ฆฌ๋ฅผ ๋น„๊ตํ•˜๋ฉด ๋œ๋‹ค. ์›์˜ ๋ฐ˜์ง€๋ฆ„์ด ๋” ํฌ๋‹ค๋ฉด ๊ทธ ์ ์€ ๋ฌด์กฐ๊ฑด ์› ์•ˆ์— ์œ„์น˜ํ•˜๊ฒŒ ๋˜๊ณ , ์›์˜ ์ค‘์‹ฌ๊ณผ ๊ทธ ์  ์‚ฌ์ด์˜ ๊ฑฐ๋ฆฌ๋ฅผ ๋น„๊ตํ•˜๋ฉด ๊ทธ ์ ์€ ์› ๋ฐ–์— ์œ„์น˜ํ•˜๊ฒŒ ๋œ๋‹ค. + +## ์ฝ”๋“œ + +```python +t = int(input()) +for _ in range(t): + cnt = 0 + x1, y1, x2, y2 = map(int, input().split()) + n = int(input()) + for _ in range(n): + c_x, c_y, r = map(int, input().split()) + r = r ** 2 + dis1 = (c_x - x1) ** 2 + (c_y - y1) ** 2 + dis2 = (c_x - x2) ** 2 + (c_y - y2) ** 2 + if (dis1 < r and dis2 < r) or (dis1 > r and dis2 > r): + continue + else: + cnt += 1 + print(cnt) +``` + +๋‘ ์  ์‚ฌ์ด์˜ ๊ฑฐ๋ฆฌ๋Š” x, y ์ขŒํ‘œ ๊ฐ„์˜ ์ฐจ์ด์˜ ์ œ๊ณฑ์˜ ํ•ฉ์— ์ œ๊ณฑ๊ทผ์„ ์ทจํ•ด ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. ์œ„ ์ฝ”๋“œ์—์„œ๋Š” ์—ฐ์‚ฐ์„ ์กฐ๊ธˆ ์ค„์ด๊ธฐ ์œ„ํ•ด ์ œ๊ณฑ์ธ ์ƒํƒœ์—์„œ ํฌ๊ธฐ๋ฅผ ๋น„๊ตํ•œ๋‹ค. ์ด ๋ฐฉ๋ฒ•์œผ๋กœ ์ถœ๋ฐœ์ ๊ณผ ๋„์ฐฉ์ ์„ ํฌํ•จํ•˜๋Š” ์›๋“ค์˜ ๊ฐœ์ˆ˜๋ฅผ ๊ตฌํ•ด์„œ ์ถœ๋ ฅํ•œ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-11-15-boj_1037.markdown b/_posts/2022-11-15-boj_1037.markdown new file mode 100644 index 00000000000..2d4de94d030 --- /dev/null +++ b/_posts/2022-11-15-boj_1037.markdown @@ -0,0 +1,43 @@ +--- +title: "[BOJ] ์•ฝ์ˆ˜ - 1037 (B1)" +author: kwon +date: 2022-11-15T23:00:00 +0900 +categories: [boj, bronze] +tags: [math, number theory] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 512 MB | + +# ๋ฌธ์ œ + +์–‘์ˆ˜ A๊ฐ€ N์˜ ์ง„์งœ ์•ฝ์ˆ˜๊ฐ€ ๋˜๋ ค๋ฉด, N์ด A์˜ ๋ฐฐ์ˆ˜์ด๊ณ , A๊ฐ€ 1๊ณผ N์ด ์•„๋‹ˆ์–ด์•ผ ํ•œ๋‹ค.ย ์–ด๋–ค ์ˆ˜ N์˜ ์ง„์งœ ์•ฝ์ˆ˜๊ฐ€ ๋ชจ๋‘ ์ฃผ์–ด์งˆ ๋•Œ, N์„ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— N์˜ ์ง„์งœ ์•ฝ์ˆ˜์˜ ๊ฐœ์ˆ˜๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ์ด ๊ฐœ์ˆ˜๋Š” 50๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™์€ ์ž์—ฐ์ˆ˜์ด๋‹ค. ๋‘˜์งธ ์ค„์—๋Š” N์˜ ์ง„์งœ ์•ฝ์ˆ˜๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. 1,000,000๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™๊ณ , 2๋ณด๋‹ค ํฌ๊ฑฐ๋‚˜ ๊ฐ™์€ ์ž์—ฐ์ˆ˜์ด๊ณ , ์ค‘๋ณต๋˜์ง€ ์•Š๋Š”๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— N์„ ์ถœ๋ ฅํ•œ๋‹ค. N์€ ํ•ญ์ƒ 32๋น„ํŠธ ๋ถ€ํ˜ธ์žˆ๋Š” ์ •์ˆ˜๋กœ ํ‘œํ˜„ํ•  ์ˆ˜ ์žˆ๋‹ค.sใ„ด + +# ํ’€์ด + +์ฃผ์–ด์ง€๋Š” A๊ฐ€ ์•ฝ์ˆ˜์ด๋ฏ€๋กœ ์ด๋“ค์„ ๋ชจ๋‘๋ฅผ ์•ฝ์ˆ˜๋กœ ๊ฐ€์ง€๋Š” ์ˆ˜ N๋ฅผ ๊ตฌํ•˜๊ธฐ ์œ„ํ•ด์„œ ๊ตณ์ด ๋ชจ๋“  A๋ฅผ ํ™•์ธํ•˜๋ฉฐ ๊ณ„์‚ฐํ•  ํ•„์š”๋Š” ์—†๋‹ค. 
๊ณฑํ–ˆ์„ ๋•Œ N์ด ๋˜๋„๋ก ํ•˜๋Š” ์•ฝ์ˆ˜๋“ค์˜ ์Œ์„ ์ฐพ์•„๋‚ด๊ธฐ๋งŒ ํ•˜๋ฉด ๋˜๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +๋‚ด๊ฐ€ ์ƒ๊ฐํ–ˆ์„ ๋•Œ ๊ฐ€์žฅ ์‰ฌ์šด ๋ฐฉ๋ฒ•์€ ์ฃผ์–ด์ง„ ์•ฝ์ˆ˜๋“ค ์ค‘ ์ตœ์†Œ๊ฐ’๊ณผ ์ตœ๋Œ€๊ฐ’์„ ์ฐพ์•„ ๊ณฑํ•˜๋Š” ๊ฒƒ์ด๋‹ค. ๋ชจ๋“  ์•ฝ์ˆ˜๋“ค ์ค‘์—์„œ ์ตœ์†Œ์™€ ์ตœ๋Œ€๋กœ ๋งŒ๋“  ์Œ์€ ๊ณฑํ–ˆ์„ ๋•Œ ํ•ญ์ƒ ์ด ์•ฝ์ˆ˜๋“ค์„ ํฌํ•จํ•˜๋Š” ์ˆ˜๊ฐ€ ๋œ๋‹ค. + +## ์ฝ”๋“œ + +```python +n = int(input()) +div = list(map(int, input().split())) +min, max = float('inf'), float('-inf') +for d in div: + if min > d : min = d + if max < d : max = d +print(min * max) +``` \ No newline at end of file diff --git a/_posts/2022-11-22-boj_1010.markdown b/_posts/2022-11-22-boj_1010.markdown new file mode 100644 index 00000000000..d9955dbb9f9 --- /dev/null +++ b/_posts/2022-11-22-boj_1010.markdown @@ -0,0 +1,71 @@ +--- +title: "[BOJ] ๋‹ค๋ฆฌ ๋†“๊ธฐ - 1010 (S5)" +author: kwon +date: 2022-11-22T23:00:00 +0900 +categories: [boj, silver] +tags: [math, dynamic programing, combinatorics] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 0.5 ์ดˆ (์ถ”๊ฐ€ ์‹œ๊ฐ„ ์—†์Œ) | 128 MB | + +# ๋ฌธ์ œ + +์žฌ์›์ด๋Š” ํ•œ ๋„์‹œ์˜ ์‹œ์žฅ์ด ๋˜์—ˆ๋‹ค. ์ด ๋„์‹œ์—๋Š”ย ๋„์‹œ๋ฅผ ๋™์ชฝ๊ณผ ์„œ์ชฝ์œผ๋กœ ๋‚˜๋ˆ„๋Š” ํฐ ์ผ์ง์„  ๋ชจ์–‘์˜ ๊ฐ•์ด ํ๋ฅด๊ณ  ์žˆ๋‹ค.ย ํ•˜์ง€๋งŒ ์žฌ์›์ด๋Š” ๋‹ค๋ฆฌ๊ฐ€ ์—†์–ด์„œ ์‹œ๋ฏผ๋“ค์ด ๊ฐ•์„ ๊ฑด๋„ˆ๋Š”๋ฐ ํฐ ๋ถˆํŽธ์„ ๊ฒช๊ณ  ์žˆ์Œ์„ ์•Œ๊ณ  ๋‹ค๋ฆฌ๋ฅผ ์ง“๊ธฐ๋กœ ๊ฒฐ์‹ฌํ•˜์˜€๋‹ค. ๊ฐ• ์ฃผ๋ณ€์—์„œ ๋‹ค๋ฆฌ๋ฅผ ์ง“๊ธฐ์— ์ ํ•ฉํ•œ ๊ณณ์„ ์‚ฌ์ดํŠธ๋ผ๊ณ  ํ•œ๋‹ค. ์žฌ์›์ด๋Š” ๊ฐ• ์ฃผ๋ณ€์„ ๋ฉด๋ฐ€ํžˆ ์กฐ์‚ฌํ•ด ๋ณธ ๊ฒฐ๊ณผ ๊ฐ•์˜ ์„œ์ชฝ์—๋Š” N๊ฐœ์˜ ์‚ฌ์ดํŠธ๊ฐ€ ์žˆ๊ณ  ๋™์ชฝ์—๋Š” M๊ฐœ์˜ ์‚ฌ์ดํŠธ๊ฐ€ ์žˆ๋‹ค๋Š” ๊ฒƒ์„ ์•Œ์•˜๋‹ค. (N โ‰ค M) + +์žฌ์›์ด๋Š” ์„œ์ชฝ์˜ ์‚ฌ์ดํŠธ์™€ ๋™์ชฝ์˜ ์‚ฌ์ดํŠธ๋ฅผ ๋‹ค๋ฆฌ๋กœ ์—ฐ๊ฒฐํ•˜๋ ค๊ณ  ํ•œ๋‹ค. (์ด๋•Œ ํ•œ ์‚ฌ์ดํŠธ์—๋Š” ์ตœ๋Œ€ ํ•œ ๊ฐœ์˜ ๋‹ค๋ฆฌ๋งŒ ์—ฐ๊ฒฐ๋  ์ˆ˜ ์žˆ๋‹ค.) ์žฌ์›์ด๋Š” ๋‹ค๋ฆฌ๋ฅผ ์ตœ๋Œ€ํ•œ ๋งŽ์ด ์ง€์œผ๋ ค๊ณ  ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์„œ์ชฝ์˜ ์‚ฌ์ดํŠธ ๊ฐœ์ˆ˜๋งŒํผ (N๊ฐœ) ๋‹ค๋ฆฌ๋ฅผ ์ง€์œผ๋ ค๊ณ  ํ•œ๋‹ค. ๋‹ค๋ฆฌ๋ผ๋ฆฌ๋Š” ์„œ๋กœ ๊ฒน์ณ์งˆ ์ˆ˜ ์—†๋‹ค๊ณ  ํ•  ๋•Œ ๋‹ค๋ฆฌ๋ฅผ ์ง€์„ ์ˆ˜ ์žˆ๋Š” ๊ฒฝ์šฐ์˜ ์ˆ˜๋ฅผ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜๋ผ. + +![https://www.acmicpc.net/upload/201003/pic1.JPG](https://www.acmicpc.net/upload/201003/pic1.JPG) + +# ์ž…๋ ฅ + +์ž…๋ ฅ์˜ ์ฒซ ์ค„์—๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๊ทธ ๋‹ค์Œ ์ค„๋ถ€ํ„ฐ ๊ฐ๊ฐ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด ๊ฐ•์˜ ์„œ์ชฝ๊ณผ ๋™์ชฝ์— ์žˆ๋Š” ์‚ฌ์ดํŠธ์˜ ๊ฐœ์ˆ˜ ์ •์ˆ˜ N, M (0 < N โ‰ค M < 30)์ด ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด ์ฃผ์–ด์ง„ ์กฐ๊ฑด ํ•˜์— ๋‹ค๋ฆฌ๋ฅผ ์ง€์„ ์ˆ˜ ์žˆ๋Š” ๊ฒฝ์šฐ์˜ ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +## ์กฐํ•ฉ + +์„œ์ชฝ์˜ ๋‹ค๋ฆฌ์˜ ๊ฐœ์ˆ˜๋งŒํผ ๋™์ชฝ์˜ ๋‹ค๋ฆฌ๋ฅผ ๊ณ ๋ฅด๋Š” ๋ฌธ์ œ๋กœ ์ƒ๊ฐํ•  ์ˆ˜ ์žˆ๋‹ค. ํ•˜์ง€๋งŒ ์ฃผ์˜ํ•  ์ ์€ ๋‹ค๋ฆฌ๊ฐ€ ๊ต์ฐจ๋˜๋ฉด ์•ˆ๋œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. ๋•Œ๋ฌธ์— ์ˆœ์„œ๋ฅผ ์ƒ๊ฐํ•˜์ง€ ์•Š๊ณ  ์„œ์ชฝ์˜ ๋‹ค๋ฆฌ๋ฅผ ๋™์ชฝ ๋‹ค๋ฆฌ ๊ฐœ์ˆ˜๋งŒํผ ๊ณจ๋ผ์•ผ ํ•œ๋‹ค. + +์ˆœ์„œ ์—†์ด M๊ฐœ๋ฅผ ๊ณ ๋ฅธ ํ›„ ์„œ์ชฝ ๋‹ค๋ฆฌ์˜ ์œ„๋ถ€ํ„ฐ, ๊ณ ๋ฅธ ๋‹ค๋ฆฌ์˜ ์œ„๋ถ€ํ„ฐ ๋‚ด๋ ค๊ฐ€๋ฉด์„œ ์ด์–ด์ค€๋‹ค. ์œ„๋ถ€ํ„ฐ ์ฐจ๋ก€๋Œ€๋กœ ์ด์–ด์ฃผ๋ฉด ๋‹ค๋ฆฌ๊ฐ€ ๊ต์ฐจ๋˜๋Š” ์ผ์ด ์—†๊ณ  ์ˆœ์„œ๋Š” ๋”ฑ ํ•œ ๊ฐ€์ง€๋งŒ ์ฃผ์–ด์ง„๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ์ˆœ์„œ ์—†์ด ๊ณ ๋ฅด๋Š” ์กฐํ•ฉ${m}\choose{n}$์„ ์ด์šฉํ•˜๋ฉด ๋œ๋‹ค. + +## ๋™์  ํ”„๋กœ๊ทธ๋ž˜๋ฐ + +๋‹ค๋ฆฌ๋ฅผ ์ด์„ ์ˆ˜ ์žˆ๋Š” ์Œ์„ ์žฌ๊ท€๋กœ ๋ฐ˜๋ณตํ•˜๋ฉด์„œ ๊ฐœ์ˆ˜๋ฅผ ์ฐพ๋Š”๋‹ค. 
`b1`, `b2`๋Š” ๊ฐ๊ฐ ์„œ-๋™์ชฝ์˜ ์‚ฌ์ดํŠธ ๋ฒˆํ˜ธ์ด๋ฉฐ `b1`์ด N์ด ๋์„ ๋•Œ ๋ชจ๋“  ๋‹ค๋ฆฌ๊ฐ€ ์ด์–ด์ง„ ๊ฒƒ์ด๋ฏ€๋กœ ๋‹ค๋ฆฌ๋ฅผ ์ž‡๋Š” ๊ฒฝ์šฐ๋ฅผ ํ•˜๋‚˜ ์ฐพ์•˜๋‹ค๋Š” ์˜๋ฏธ๋กœ 1์„ returnํ•œ๋‹ค. + +ํƒ์ƒ‰์€ ์œ„๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋ฉฐ ๊ฐ ํƒ์ƒ‰์˜ ๋ฒ”์œ„๋Š” ์ง์ „์— ์—ฐ๊ฒฐํ–ˆ๋˜ ์‚ฌ์ดํŠธ์˜ ๋‹ค์Œ(`b2+ 1`)๋ถ€ํ„ฐ ๋‚จ์€ ์„œ์ชฝ ์‚ฌ์ดํŠธ ๊ฐœ์ˆ˜๋งŒํผ ๋‚จ์„ ๋•Œ๊นŒ์ง€(`(b2 + 1) + m - n`)์ด๋‹ค. ๋๋‚˜๋Š” ๋ฒ”์œ„๊ฐ€ ์•ž์„  ์‹์œผ๋กœ ๊ณ„์‚ฐ๋˜๋Š” ์ด์œ ๋Š” ๋‚จ์•„์žˆ๋Š” ์„œ์ชฝ ์‚ฌ์ดํŠธ๊ฐ€ ์—ฐ๊ฒฐ๋  ์ˆ˜ ์žˆ๋Š” ๋™์ชฝ ์‚ฌ์ดํŠธ๊ฐ€ ๋‚จ์•„ ์žˆ์–ด์•ผ ํ•˜๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด n, m์ด ๊ฐ๊ฐ 4, 7์ด๊ณ  ํ˜„์žฌ ์—ฐ๊ฒฐํ•˜๋ ค๋Š” ์‚ฌ์ดํŠธ๋ฅผ ํฌํ•จํ•˜์—ฌ ๋‚จ์•„์žˆ๋Š” ์„œ์ชฝ ์‚ฌ์ดํŠธ๊ฐ€ 3๊ฐœ, ๋™์ชฝ ์‚ฌ์ดํŠธ๋Š” 5 ๊ฐœ๋ผ๊ณ  ํ•œ๋‹ค๋ฉด, ํ˜„์žฌ ์‚ฌ์ดํŠธ๋ฅผ ์—ฐ๊ฒฐํ•˜๊ณ  ๋‚จ์•„์žˆ๋Š” ๋™์ชฝ ์‚ฌ์ดํŠธ๊ฐ€ ์ตœ์†Œ 2 ๊ฐœ๋Š” ๋˜์–ด์•ผ ํ•˜๊ธฐ ๋•Œ๋ฌธ์— 1 + (7 - 4) + 1 = 5๋กœ ๋™์ชฝ์˜ 5๋ฒˆ์งธ ์‚ฌ์ดํŠธ๊นŒ์ง€๋งŒ ์—ฐ๊ฒฐ์ด ๊ฐ€๋Šฅํ•˜๋‹ค. + +### ์ฝ”๋“œ + +```python +def dp(b1, b2, n, m): + global memo + if b1 == n: + return 1 + cnt = 0 + for i in range(b2 + 1, min(m, b2 + m - n + 1) + 1): + if (b1 + 1, i) in memo: + cnt += memo[(b1 + 1, i)] + else: + tmp = dp(b1 + 1, i, n, m) + cnt += tmp + memo[(b1 + 1, i)] = tmp + return cnt + +t = int(input()) +for _ in range(t): + memo = {} + n, m = map(int, input().split()) + print(dp(0, 0, n, m)) +``` \ No newline at end of file diff --git a/_posts/2022-11-22-boj_3036.markdown b/_posts/2022-11-22-boj_3036.markdown new file mode 100644 index 00000000000..56ab60b63a7 --- /dev/null +++ b/_posts/2022-11-22-boj_3036.markdown @@ -0,0 +1,79 @@ +--- +title: "[BOJ] ๋ง - 3036 (S4)" +author: kwon +date: 2022-11-22T23:00:00 +0900 +categories: [boj, silver] +tags: [math, number theory, Euclidean algorithm] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 1 ์ดˆ | 128 MB | + +# ๋ฌธ์ œ + +์ƒ๊ทผ์ด๋Š” ์ฐฝ๊ณ ์—์„œ ๋ง N๊ฐœ๋ฅผ ๋ฐœ๊ฒฌํ–ˆ๋‹ค. ์ƒ๊ทผ์ด๋Š” ๊ฐ๊ฐ์˜ ๋ง์ด ์•ž์— ์žˆ๋Š” ๋ง๊ณผ ๋’ค์— ์žˆ๋Š” ๋ง๊ณผ ์ ‘ํ•˜๋„๋ก ๋ฐ”๋‹ฅ์— ๋‚ด๋ ค๋†“์•˜๋‹ค. + +![](https://upload.acmicpc.net/44a0e81a-3870-4e94-8db0-73543fca3aa6/-/preview/) + +์ƒ๊ทผ์ด๋Š” ์ฒซ ๋ฒˆ์งธ ๋ง์„ ๋Œ๋ฆฌ๊ธฐ ์‹œ์ž‘ํ–ˆ๊ณ , ๋‚˜๋จธ์ง€ ๋ง๋„ ๊ฐ™์ด ๋Œ์•„๊ฐ„๋‹ค๋Š” ์‚ฌ์‹ค์„ ๋ฐœ๊ฒฌํ–ˆ๋‹ค. ๋‚˜๋จธ์ง€ ๋ง์€ ์ฒซ ๋ฒˆ์งธ ๋ง ๋ณด๋‹ค ๋น ๋ฅด๊ฒŒ ๋Œ์•„๊ฐ€๊ธฐ๋„ ํ–ˆ๊ณ , ๋А๋ฆฌ๊ฒŒ ๋Œ์•„๊ฐ€๊ธฐ๋„ ํ–ˆ๋‹ค. ์ด๋ ‡๊ฒŒ ๋ง์„ ๋Œ๋ฆฌ๋‹ค ๋ณด๋‹ˆ ์ฒซ ๋ฒˆ์งธ ๋ง์„ ํ•œ ๋ฐ”ํ€ด ๋Œ๋ฆฌ๋ฉด, ๋‚˜๋จธ์ง€ ๋ง์€ ๋ช‡ ๋ฐ”ํ€ด ๋„๋Š”์ง€ ๊ถ๊ธˆํ•ด์กŒ๋‹ค. + +๋ง์˜ ๋ฐ˜์ง€๋ฆ„์ด ์ฃผ์–ด์ง„๋‹ค. ์ด๋•Œ, ์ฒซ ๋ฒˆ์งธ ๋ง์„ ํ•œ ๋ฐ”ํ€ด ๋Œ๋ฆฌ๋ฉด, ๋‚˜๋จธ์ง€ ๋ง์€ ๋ช‡ ๋ฐ”ํ€ด ๋Œ์•„๊ฐ€๋Š”์ง€ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ๋ง์˜ ๊ฐœ์ˆ˜ N์ด ์ฃผ์–ด์ง„๋‹ค. (3 โ‰ค N โ‰ค 100) + +๋‹ค์Œ ์ค„์—๋Š” ๋ง์˜ ๋ฐ˜์ง€๋ฆ„์ด ์ƒ๊ทผ์ด๊ฐ€ ๋ฐ”๋‹ฅ์— ๋†“์€ ์ˆœ์„œ๋Œ€๋กœ ์ฃผ์–ด์ง„๋‹ค. ๋ฐ˜์ง€๋ฆ„์€ 1๊ณผ 1000๋ฅผ ํฌํ•จํ•˜๋Š” ์‚ฌ์ด์˜ ์ž์—ฐ์ˆ˜์ด๋‹ค. + +# ์ถœ๋ ฅ + +์ถœ๋ ฅ์€ ์ด N-1์ค„์„ ํ•ด์•ผ ํ•œ๋‹ค. ์ฒซ ๋ฒˆ์งธ ๋ง์„ ์ œ์™ธํ•œ ๊ฐ๊ฐ์˜ ๋ง์— ๋Œ€ํ•ด์„œ, ์ฒซ ๋ฒˆ์งธ ๋ง์„ ํ•œ ๋ฐ”ํ€ด ๋Œ๋ฆฌ๋ฉด ๊ทธ ๋ง์€ ๋ช‡ ๋ฐ”ํ€ด ๋„๋Š”์ง€ ๊ธฐ์•ฝ ๋ถ„์ˆ˜ ํ˜•ํƒœ A/B๋กœ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +ํฌ๊ฒŒ 3๊ฐ€์ง€ ๊ฒฝ์šฐ๊ฐ€ ์กด์žฌํ•˜๋ฉฐ ๊ทธ์— ๋Œ€ํ•œ ์ถœ๋ ฅ์€ ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +1. ์ฒซ ๋ฒˆ์งธ ๋ง์˜ ํฌ๊ธฐ๋ฅผ ๋ง ํ•˜๋‚˜์˜ ํฌ๊ธฐ๋กœ ๋‚˜๋ˆ„์—ˆ์„ ๋•Œ ๋‚˜๋ˆ„์–ด ๋–จ์–ด์ง€๋Š” ๊ฒฝ์šฐ. + + `print(f"{first // r}/1")` + + ์ฒซ ๋ฒˆ์งธ ๋ง์˜ ํฌ๊ธฐ๊ฐ€ ๋” ํฌ๋ฉฐ ๋ฐฐ์ˆ˜ - ์•ฝ์ˆ˜์˜ ๊ด€๊ณ„์ด๋ฏ€๋กœ ์ฒซ ๋ฒˆ์งธ ๋ง์„ ํ˜„์žฌ ํ™•์ธํ•˜๊ณ  ์žˆ๋Š” ๋ง์˜ ํฌ๊ธฐ๋กœ ๋‚˜๋ˆˆ ๋ชซ์„ ๋ถ„์ž๋กœ, 1์„ ๋ถ„๋ชจ๋กœ ํ•˜์—ฌ ์ถœ๋ ฅํ•œ๋‹ค. + +2. 
๋ง ํ•˜๋‚˜์˜ ํฌ๊ธฐ๋ฅผ ์ฒซ ๋ฒˆ์งธ ๋ง์˜ ํฌ๊ธฐ๋กœ ๋‚˜๋ˆ„์—ˆ์„ ๋•Œ ๋‚˜๋ˆ„์–ด ๋–จ์–ด์ง€๋Š” ๊ฒฝ์šฐ. + + `print(f"1/{r // first}")` + + ํ˜„์žฌ ๋ง์˜ ํฌ๊ธฐ๊ฐ€ ๋” ํฌ๋ฉฐ ๋ฐฐ์ˆ˜ - ์•ฝ์ˆ˜์˜ ๊ด€๊ณ„์ด๋ฏ€๋กœ ํ˜„์žฌ ๋ง์˜ ํฌ๊ธฐ๋ฅผ ์ฒซ ๋ฒˆ์งธ ๋ง์˜ ํฌ๊ธฐ๋กœ ๋‚˜๋ˆˆ ๋ชซ์„ ๋ถ„๋ชจ๋กœ, 1์„ ๋ถ„์ž๋กœ ํ•˜์—ฌ ์ถœ๋ ฅํ•œ๋‹ค. + +3. ๋‘˜ ๋‹ค ๋‚˜๋ˆ„์–ด ๋–จ์–ด์ง€์ง€ ์•Š๋Š” ๊ฒฝ์šฐ + + ์ด ๊ฒฝ์šฐ ํ•œ ์ชฝ์˜ ๊ฐ’์œผ๋กœ ๋‚˜๋ˆ„์–ด ๋–จ์–ด์ง€์ง€ ์•Š๊ธฐ ๋•Œ๋ฌธ์— ๋ถ„์ž์™€ ๋ถ„๋ชจ๋ฅผ ์•ฝ๋ถ„ํ•ด์•ผ ํ•œ๋‹ค. ์•ฝ๋ถ„์€ ๊ฐ ์ˆ˜๋ฅผ ์ตœ๋Œ€๊ณต์•ฝ์ˆ˜๋กœ ๋‚˜๋ˆ„๋ฉด ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. + + ์ตœ๋Œ€ ๊ณต์•ฝ์ˆ˜๋Š” [์œ ํด๋ฆฌ๋“œ ํ˜ธ์ œ๋ฒ•](/posts/euclidean_algorithm/)์œผ๋กœ ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. + + +## ์ฝ”๋“œ + +```python +n = int(input()) +ring = list(map(int, input().split())) +first = ring[0] +rings = ring[1:] +def gcd(a, b): + while a % b != 0: + r = a % b + a, b = b, r + return b + +for r in rings: + if first % r == 0: + print(f"{first // r}/1") + elif r % first == 0: + print(f"1/{r // first}") + else: + g = gcd(first, r) + print(f"{first // g}/{r // g}") +``` \ No newline at end of file diff --git a/_posts/2022-11-22-euclidean_algorithm.markdown b/_posts/2022-11-22-euclidean_algorithm.markdown new file mode 100644 index 00000000000..e0511260413 --- /dev/null +++ b/_posts/2022-11-22-euclidean_algorithm.markdown @@ -0,0 +1,31 @@ +--- +title: "์œ ํด๋ฆฌ๋“œ ํ˜ธ์ œ๋ฒ• (Euclidean algorithm)" +author: kwon +date: 2022-11-22T23:00:00 +0900 +categories: [background, math] +tags: [math, number theory, Euclidean algorithm] +math: true +mermaid: false +--- + +์œ ํด๋ฆฌ๋“œ ํ˜ธ์ œ๋ฒ•์€ 2๊ฐœ์˜ ์ž์—ฐ์ˆ˜์— ๋Œ€ํ•ด ์ตœ๋Œ€๊ณต์•ฝ์ˆ˜๋ฅผ ๊ตฌํ•˜๋Š” ์•Œ๊ณ ๋ฆฌ์ฆ˜์ด๋ฉฐ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์„ฑ์งˆ์„ ํ†ตํ•ด ์•Œ๊ณ ๋ฆฌ์ฆ˜์„ ์ง„ํ–‰ํ•œ๋‹ค. + +> $a, b \in \mathbb{Z}$์ด๊ณ  $a$๋ฅผ $b$๋กœ ๋‚˜๋ˆˆ ๋‚˜๋จธ์ง€๋ฅผ $r$์ด๋ผ ํ•˜์ž. ($b \leq a, 0 \leq r \leq b$) +> +> +> $a, b$์˜ ์ตœ๋Œ€ ๊ณต์•ฝ์ˆ˜๋ฅผ $(a, b)$๋ผ๊ณ  ํ•˜๋ฉด, ๋‹ค์Œ์ด ์„ฑ๋ฆฝํ•œ๋‹ค. +> +> $(a, b)=(b, r)$ +> + +์ถœ์ฒ˜: wikipidia + +$r$์ด 0์ด ๋  ๋•Œ ์•Œ๊ณ ๋ฆฌ์ฆ˜์„ ๋ฉˆ์ถ”๋ฉฐ, ์ด ๋•Œ์˜ $b$๊ฐ€ ์ตœ๋Œ€๊ณต์•ฝ์ˆ˜๊ฐ€ ๋œ๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด 1460๊ณผ 1037์— ๋Œ€ํ•ด ์•Œ๊ณ ๋ฆฌ์ฆ˜์„ ์ง„ํ–‰ํ•ด๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +$$\begin{flalign*} +(1460, 1037)\\=(1037, 323)\\=(323, 68)\\=(68, 52)\\=(52, 16)\\=(16, 4)\\=(4,0) +\end{flalign*}$$ + +$r$์ด 0์ผ๋•Œ $b$๊ฐ€ 4์ด๋ฏ€๋กœ 1460๊ณผ 1037์˜ ์ตœ๋Œ€๊ณต์•ฝ์ˆ˜๋Š” 4์ด๋‹ค. \ No newline at end of file diff --git a/_posts/2022-12-12-boj_1541.markdown b/_posts/2022-12-12-boj_1541.markdown new file mode 100644 index 00000000000..e81f3a7816c --- /dev/null +++ b/_posts/2022-12-12-boj_1541.markdown @@ -0,0 +1,76 @@ +--- +title: "[BOJ] ์žƒ์–ด๋ฒ„๋ฆฐ ๊ด„ํ˜ธ - 1541 (S2)" +author: kwon +date: 2022-12-12T23:00:00 +0900 +categories: [boj, silver] +tags: [math, string, greedy algorithm, parsing] +math: true +mermaid: false +--- +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2์ดˆ | 128 MB | + +## ๋ฌธ์ œ + +์„ธ์ค€์ด๋Š” ์–‘์ˆ˜์™€ +, -, ๊ทธ๋ฆฌ๊ณ  ๊ด„ํ˜ธ๋ฅผ ๊ฐ€์ง€๊ณ  ์‹์„ ๋งŒ๋“ค์—ˆ๋‹ค. ๊ทธ๋ฆฌ๊ณ  ๋‚˜์„œ ์„ธ์ค€์ด๋Š” ๊ด„ํ˜ธ๋ฅผ ๋ชจ๋‘ ์ง€์› ๋‹ค. + +๊ทธ๋ฆฌ๊ณ  ๋‚˜์„œ ์„ธ์ค€์ด๋Š” ๊ด„ํ˜ธ๋ฅผ ์ ์ ˆํžˆ ์ณ์„œ ์ด ์‹์˜ ๊ฐ’์„ ์ตœ์†Œ๋กœ ๋งŒ๋“ค๋ ค๊ณ  ํ•œ๋‹ค. + +๊ด„ํ˜ธ๋ฅผ ์ ์ ˆํžˆ ์ณ์„œ ์ด ์‹์˜ ๊ฐ’์„ ์ตœ์†Œ๋กœ ๋งŒ๋“œ๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +## ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์‹์ด ์ฃผ์–ด์ง„๋‹ค. ์‹์€ โ€˜0โ€™~โ€˜9โ€™, โ€˜+โ€™, ๊ทธ๋ฆฌ๊ณ  โ€˜-โ€™๋งŒ์œผ๋กœ ์ด๋ฃจ์–ด์ ธ ์žˆ๊ณ , ๊ฐ€์žฅ ์ฒ˜์Œ๊ณผ ๋งˆ์ง€๋ง‰ ๋ฌธ์ž๋Š” ์ˆซ์ž์ด๋‹ค. ๊ทธ๋ฆฌ๊ณ  ์—ฐ์†ํ•ด์„œ ๋‘ ๊ฐœ ์ด์ƒ์˜ ์—ฐ์‚ฐ์ž๊ฐ€ ๋‚˜ํƒ€๋‚˜์ง€ ์•Š๊ณ , 5์ž๋ฆฌ๋ณด๋‹ค ๋งŽ์ด ์—ฐ์†๋˜๋Š” ์ˆซ์ž๋Š” ์—†๋‹ค. 
์ˆ˜๋Š” 0์œผ๋กœ ์‹œ์ž‘ํ•  ์ˆ˜ ์žˆ๋‹ค. ์ž…๋ ฅ์œผ๋กœ ์ฃผ์–ด์ง€๋Š” ์‹์˜ ๊ธธ์ด๋Š” 50๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™๋‹ค. + +## ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— ์ •๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. + +## ํ’€์ด + +์ž…๋ ฅ๋˜๋Š” ๋ถ€ํ˜ธ๋Š” โ€˜+โ€™, โ€˜-โ€™ ๋ฟ์ด๊ธฐ ๋•Œ๋ฌธ์—, ์ตœ์†Œ๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋Š” ๋ฐฉ๋ฒ•์€ ๊ฐ„๋‹จํ•˜๊ฒŒ ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +โ€˜-โ€™๊ฐ€ ๋“ฑ์žฅํ•˜์ง€ ์•Š์œผ๋ฉด ๋ฌด์กฐ๊ฑด ๋”ํ•  ์ˆ˜ ๋ฐ–์— ์—†๋‹ค. + +ํ•˜์ง€๋งŒ โ€˜-โ€™๊ฐ€ ๋“ฑ์žฅํ•˜๋Š” ์ˆœ๊ฐ„๋ถ€ํ„ฐ๋Š” ๊ด„ํ˜ธ๋ฅผ ์ด์šฉํ•˜์—ฌ ๋ฌด์กฐ๊ฑด ํ›„์— ๋‚˜์˜ค๋Š” ์ˆ˜๋ฅผ ๋บ„์…ˆ์œผ๋กœ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +์˜ˆ๋ฅผ ๋“ค๋ฉด 55-50+40+90-100+10-11 ์ด๋ผ๋Š” ์ˆ˜์‹์ด ์ž…๋ ฅ๋์„ ๋•Œ 55-(50+40+90)-(100+10)-11 = 55-50-40-90-100-10-11 ๋กœ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +### ์ฝ”๋“œ + +```python +st = input().strip()+ '+' +total = 0 +minus = False +num = '' +for c in st: + if c.isdigit(): + num += c + else: + if minus: + total -= int(num) + else: + total += int(num) + if c == '-': + minus = True + + num = '' +print(total) +``` + +์ž…๋ ฅ์€ ์ˆซ์ž์™€ ๋ถ€ํ˜ธ๊ฐ€ ์„ž์ธ ๋ฌธ์ž์—ด๋กœ ์ฃผ์–ด์ง€๊ธฐ ๋•Œ๋ฌธ์— ๊ทธ ๋ฌธ์ž์—ด์„ ํ™•์ธํ•˜๋ฉด์„œ ๋ฌธ์ž๊ฐ€ ์ˆซ์ž์ธ์ง€ ๋ถ€ํ˜ธ์ธ์ง€์— ๋•Œ๋ผ ๋‹ค๋ฅด๊ฒŒ ์ฒ˜๋ฆฌ๋ฅผ ํ•ด์ฃผ์–ด์•ผ ํ•œ๋‹ค. + +1. ์ˆซ์ž + + ๋ถ€ํ˜ธ๊ฐ€ ๋“ค์–ด์˜ค๊ธฐ ์ „๊นŒ์ง€ ํ•˜๋‚˜์˜ ์ˆซ์ž์ด๊ธฐ ๋•Œ๋ฌธ์— ๋ฌธ์ž์—ด๋กœ ๊ณ„์† ์ด์–ด์ค€๋‹ค. + +2. + + + ๋งŒ์•ฝ โ€˜-โ€™ ๋ถ€ํ˜ธ๊ฐ€ ํ•œ ๋ฒˆ๋„ ๋‚˜์˜ค์ง€ ์•Š์•˜๋‹ค๋ฉด ๋”ํ•  ์ˆ˜ ๋ฐ–์— ์—†์œผ๋ฏ€๋กœ ๊ฒฐ๊ณผ๊ฐ€ ์ €์žฅ๋  `total`์— ์ •์ˆ˜๋กœ ์ด์ „๊นŒ์ง€ ๋งŒ๋“ค์–ด ๋†“์€ ๋ฌธ์ž์—ด์„ ์ •์ˆ˜๋กœ ๋ฐ”๊ฟ” ๋”ํ•œ๋‹ค. + +3. - + + โ€˜-โ€™๊ฐ€ ๋“ฑ์žฅํ•œ ์ˆœ๊ฐ„๋ถ€ํ„ฐ๋Š” ๋ฌด์กฐ๊ฑด ๋นผ๋ฉด ๋˜๊ธฐ ๋•Œ๋ฌธ์— `minus` ๋ณ€์ˆ˜๋ฅผ `True`๋กœ ๋ฐ”๊ฟ”์ค€๋‹ค. + + ์ดํ›„์˜ ์—ฐ์‚ฐ์€ `minus`๊ฐ€ `True`์ด๊ธฐ ๋•Œ๋ฌธ์— ์ด์ „๊นŒ์ง€ ๋งŒ๋“ค์–ด ๋†“์€ ๋ฌธ์ž์—ด์„ ์ •์ˆ˜๋กœ ๋ฐ”๊ฟ” ๋บ€๋‹ค. \ No newline at end of file diff --git a/_posts/2022-12-12-boj_1931.markdown b/_posts/2022-12-12-boj_1931.markdown new file mode 100644 index 00000000000..d9c71761cf8 --- /dev/null +++ b/_posts/2022-12-12-boj_1931.markdown @@ -0,0 +1,48 @@ +--- +title: "[BOJ] ํšŒ์˜์‹ค ๋ฐฐ์ • - 1931 (S1)" +author: kwon +date: 2022-12-12T23:00:00 +0900 +categories: [boj, silver] +tags: [greedy algorithm, sort] +math: true +mermaid: false +--- +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2์ดˆ | 128 MB | + +# ๋ฌธ์ œ +ํ•œ ๊ฐœ์˜ ํšŒ์˜์‹ค์ด ์žˆ๋Š”๋ฐ ์ด๋ฅผ ์‚ฌ์šฉํ•˜๊ณ ์ž ํ•˜๋Š” N๊ฐœ์˜ ํšŒ์˜์— ๋Œ€ํ•˜์—ฌ ํšŒ์˜์‹ค ์‚ฌ์šฉํ‘œ๋ฅผ ๋งŒ๋“ค๋ ค๊ณ  ํ•œ๋‹ค. ๊ฐ ํšŒ์˜ I์— ๋Œ€ํ•ด ์‹œ์ž‘์‹œ๊ฐ„๊ณผ ๋๋‚˜๋Š” ์‹œ๊ฐ„์ด ์ฃผ์–ด์ ธ ์žˆ๊ณ , ๊ฐ ํšŒ์˜๊ฐ€ ๊ฒน์น˜์ง€ ์•Š๊ฒŒ ํ•˜๋ฉด์„œ ํšŒ์˜์‹ค์„ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ํšŒ์˜์˜ ์ตœ๋Œ€ ๊ฐœ์ˆ˜๋ฅผ ์ฐพ์•„๋ณด์ž. ๋‹จ, ํšŒ์˜๋Š” ํ•œ๋ฒˆ ์‹œ์ž‘ํ•˜๋ฉด ์ค‘๊ฐ„์— ์ค‘๋‹จ๋  ์ˆ˜ ์—†์œผ๋ฉฐ ํ•œ ํšŒ์˜๊ฐ€ ๋๋‚˜๋Š” ๊ฒƒ๊ณผ ๋™์‹œ์— ๋‹ค์Œ ํšŒ์˜๊ฐ€ ์‹œ์ž‘๋  ์ˆ˜ ์žˆ๋‹ค. ํšŒ์˜์˜ ์‹œ์ž‘์‹œ๊ฐ„๊ณผ ๋๋‚˜๋Š” ์‹œ๊ฐ„์ด ๊ฐ™์„ ์ˆ˜๋„ ์žˆ๋‹ค. ์ด ๊ฒฝ์šฐ์—๋Š” ์‹œ์ž‘ํ•˜์ž๋งˆ์ž ๋๋‚˜๋Š” ๊ฒƒ์œผ๋กœ ์ƒ๊ฐํ•˜๋ฉด ๋œ๋‹ค. + +# ์ž…๋ ฅ +์ฒซ์งธ ์ค„์— ํšŒ์˜์˜ ์ˆ˜ N(1 โ‰ค N โ‰ค 100,000)์ด ์ฃผ์–ด์ง„๋‹ค. ๋‘˜์งธ ์ค„๋ถ€ํ„ฐ N+1 ์ค„๊นŒ์ง€ ๊ฐ ํšŒ์˜์˜ ์ •๋ณด๊ฐ€ ์ฃผ์–ด์ง€๋Š”๋ฐ ์ด๊ฒƒ์€ ๊ณต๋ฐฑ์„ ์‚ฌ์ด์— ๋‘๊ณ  ํšŒ์˜์˜ ์‹œ์ž‘์‹œ๊ฐ„๊ณผ ๋๋‚˜๋Š” ์‹œ๊ฐ„์ด ์ฃผ์–ด์ง„๋‹ค. ์‹œ์ž‘ ์‹œ๊ฐ„๊ณผ ๋๋‚˜๋Š” ์‹œ๊ฐ„์€ $2^31-1$๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™์€ ์ž์—ฐ์ˆ˜ ๋˜๋Š” 0์ด๋‹ค. + +# ์ถœ๋ ฅ +์ฒซ์งธ ์ค„์— ์ตœ๋Œ€ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ํšŒ์˜์˜ ์ตœ๋Œ€ ๊ฐœ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +**๋๋‚˜๋Š” ์‹œ๊ฐ„์„ ๊ธฐ์ค€์œผ๋กœ ๋จผ์ € ์ •๋ ฌ**ํ•˜๊ณ  ๊ฐ™์œผ๋ฉด ์‹œ์ž‘ํ•˜๋Š” ์‹œ๊ฐ„์„ ๊ธฐ์ค€์œผ๋กœ ์ •๋ ฌํ•˜๋Š” ๊ฒƒ์ด ํ•ต์‹ฌ์ด๋‹ค. 
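+
+์•„๋ž˜๋Š” ์ด ์ •๋ ฌ ๊ธฐ์ค€์„ ์ž„์˜๋กœ ๊ฐ€์ •ํ•œ ์ž‘์€ ์˜ˆ์‹œ๋กœ ํ™•์ธํ•ด ๋ณธ ์Šค์ผ€์น˜๋‹ค. ์‹œ์ž‘ ์‹œ๊ฐ„๊ณผ ๋๋‚˜๋Š” ์‹œ๊ฐ„์ด ๊ฐ™์€ ํšŒ์˜๋„ ์ข…๋ฃŒ ์‹œ๊ฐ„์„ ๊ธฐ์ค€์œผ๋กœ ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ ๋ผ์–ด๋“ค์–ด๊ฐ€๋Š” ๊ฒƒ์„ ๋ณผ ์ˆ˜ ์žˆ๋‹ค.
+
+```python
+# ์ข…๋ฃŒ ์‹œ๊ฐ„ ์šฐ์„ , ๊ฐ™์œผ๋ฉด ์‹œ์ž‘ ์‹œ๊ฐ„ ์ˆœ์œผ๋กœ ์ •๋ ฌ๋˜๋Š”์ง€ ํ™•์ธ (๊ฐ’์€ ์ž„์˜ ๊ฐ€์ •)
+meetings = [(1, 4), (3, 3), (2, 3), (4, 5), (5, 5)]
+meetings.sort(key=lambda x: (x[1], x[0]))
+print(meetings)  # [(2, 3), (3, 3), (1, 4), (4, 5), (5, 5)]
+```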
+ +์ด๋ ‡๊ฒŒ ์ •๋ ฌ์„ ํ•˜๊ฒŒ ๋˜๋ฉด ํ•ญ์ƒ ๊ทธ ์‹œ๊ฐ„์— ์‹œ์ž‘ํ•˜๋Š” ํšŒ์˜ ์ค‘ ๊ฐ€์žฅ ๋นจ๋ฆฌ ๋๋‚˜๋Š” ํšŒ์˜๋ฅผ ์„ ํƒํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ, ๋๋‚˜๋Š” ์‹œ๊ฐ„ ์ดํ›„์— ์‹œ์ž‘ํ•˜๋Š” ํšŒ์˜ ์ค‘ ๊ฐ€์žฅ ๋นจ๋ฆฌ ๋๋‚˜๋Š” ํšŒ์˜๋„ ๋ฐ”๋กœ ์„ ํƒํ•  ์ˆ˜ ์žˆ๋‹ค. + +## ์ฝ”๋“œ + +```python +n = int(input()) +meetings = [] + +for _ in range(n): + s, f = map(int, input().split()) + meetings.append((s, f)) + +meetings.sort(key=lambda x: (x[1], x[0])) + +cur = 0 +cnt = 0 +for s, f in meetings: + if cur <= s: + cur = f + cnt += 1 +print(cnt) +``` + +์•ž์„œ ๋งํ•œ ๊ธฐ์ค€์— ๋งž๊ฒŒ ์ •๋ ฌ์„ ํ•˜๊ณ  ํ˜„์žฌ ์„ ํƒํ•œ ํšŒ์˜๊ฐ€ ๋๋‚˜๋Š” ์‹œ๊ฐ„๋ณด๋‹ค ๋” ๋Šฆ๊ฒŒ ์‹œ์ž‘ํ•˜๋Š” ์ฒซ ํšŒ์˜๋ฅผ ์„ ํƒํ•˜๋ฉด ํ•ญ์ƒ ์ตœ์ ์˜ ์„ ํƒ์„ ํ•  ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-12-13-boj_1946.markdown b/_posts/2022-12-13-boj_1946.markdown new file mode 100644 index 00000000000..ed087ac6860 --- /dev/null +++ b/_posts/2022-12-13-boj_1946.markdown @@ -0,0 +1,125 @@ +--- +title: "[BOJ] ์‹ ์ž… ์‚ฌ์› - 1946 (S1)" +author: kwon +date: 2022-12-13T23:00:00 +0900 +categories: [boj, silver] +tags: [greedy algorithm, sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2์ดˆ | 256 MB | + +## ๋ฌธ์ œ + +์–ธ์ œ๋‚˜ ์ตœ๊ณ ๋งŒ์„ ์ง€ํ–ฅํ•˜๋Š” ๊ตด์ง€์˜ ๋Œ€๊ธฐ์—… ์ง„์˜ ์ฃผ์‹ํšŒ์‚ฌ๊ฐ€ ์‹ ๊ทœ ์‚ฌ์› ์ฑ„์šฉ์„ ์‹ค์‹œํ•œ๋‹ค. ์ธ์žฌ ์„ ๋ฐœ ์‹œํ—˜์€ 1์ฐจ ์„œ๋ฅ˜์‹ฌ์‚ฌ์™€ 2์ฐจ ๋ฉด์ ‘์‹œํ—˜์œผ๋กœ ์ด๋ฃจ์–ด์ง„๋‹ค. ์ตœ๊ณ ๋งŒ์„ ์ง€ํ–ฅํ•œ๋‹ค๋Š” ๊ธฐ์—…์˜ ์ด๋…์— ๋”ฐ๋ผ ๊ทธ๋“ค์€ ์ตœ๊ณ ์˜ ์ธ์žฌ๋“ค๋งŒ์„ ์‚ฌ์›์œผ๋กœ ์„ ๋ฐœํ•˜๊ณ  ์‹ถ์–ด ํ•œ๋‹ค. + +๊ทธ๋ž˜์„œ ์ง„์˜ ์ฃผ์‹ํšŒ์‚ฌ๋Š”, ๋‹ค๋ฅธ ๋ชจ๋“  ์ง€์›์ž์™€ ๋น„๊ตํ–ˆ์„ ๋•Œ ์„œ๋ฅ˜์‹ฌ์‚ฌ ์„ฑ์ ๊ณผ ๋ฉด์ ‘์‹œํ—˜ ์„ฑ์  ์ค‘ ์ ์–ด๋„ ํ•˜๋‚˜๊ฐ€ ๋‹ค๋ฅธ ์ง€์›์ž๋ณด๋‹ค ๋–จ์–ด์ง€์ง€ ์•Š๋Š” ์ž๋งŒ ์„ ๋ฐœํ•œ๋‹ค๋Š” ์›์น™์„ ์„ธ์› ๋‹ค. ์ฆ‰, ์–ด๋–ค ์ง€์›์ž A์˜ ์„ฑ์ ์ด ๋‹ค๋ฅธ ์–ด๋–ค ์ง€์›์ž B์˜ ์„ฑ์ ์— ๋น„ํ•ด ์„œ๋ฅ˜ ์‹ฌ์‚ฌ ๊ฒฐ๊ณผ์™€ ๋ฉด์ ‘ ์„ฑ์ ์ด ๋ชจ๋‘ ๋–จ์–ด์ง„๋‹ค๋ฉด A๋Š” ๊ฒฐ์ฝ” ์„ ๋ฐœ๋˜์ง€ ์•Š๋Š”๋‹ค. + +์ด๋Ÿฌํ•œ ์กฐ๊ฑด์„ ๋งŒ์กฑ์‹œํ‚ค๋ฉด์„œ, ์ง„์˜ ์ฃผ์‹ํšŒ์‚ฌ๊ฐ€ ์ด๋ฒˆ ์‹ ๊ทœ ์‚ฌ์› ์ฑ„์šฉ์—์„œ ์„ ๋ฐœํ•  ์ˆ˜ ์žˆ๋Š” ์‹ ์ž…์‚ฌ์›์˜ ์ตœ๋Œ€ ์ธ์›์ˆ˜๋ฅผ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +## ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์—๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T(1 โ‰ค T โ‰ค 20)๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ์ฒซ์งธ ์ค„์— ์ง€์›์ž์˜ ์ˆซ์ž N(1 โ‰ค N โ‰ค 100,000)์ด ์ฃผ์–ด์ง„๋‹ค. ๋‘˜์งธ ์ค„๋ถ€ํ„ฐ N๊ฐœ ์ค„์—๋Š” ๊ฐ๊ฐ์˜ ์ง€์›์ž์˜ ์„œ๋ฅ˜์‹ฌ์‚ฌ ์„ฑ์ , ๋ฉด์ ‘ ์„ฑ์ ์˜ ์ˆœ์œ„๊ฐ€ ๊ณต๋ฐฑ์„ ์‚ฌ์ด์— ๋‘๊ณ  ํ•œ ์ค„์— ์ฃผ์–ด์ง„๋‹ค. ๋‘ ์„ฑ์  ์ˆœ์œ„๋Š” ๋ชจ๋‘ 1์œ„๋ถ€ํ„ฐ N์œ„๊นŒ์ง€ ๋™์„์ฐจ ์—†์ด ๊ฒฐ์ •๋œ๋‹ค๊ณ  ๊ฐ€์ •ํ•œ๋‹ค. + +## ์ถœ๋ ฅ + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•ด์„œ ์ง„์˜ ์ฃผ์‹ํšŒ์‚ฌ๊ฐ€ ์„ ๋ฐœํ•  ์ˆ˜ ์žˆ๋Š” ์‹ ์ž…์‚ฌ์›์˜ ์ตœ๋Œ€ ์ธ์›์ˆ˜๋ฅผ ํ•œ ์ค„์— ํ•˜๋‚˜์”ฉ ์ถœ๋ ฅํ•œ๋‹ค. + +## ํ’€์ด + +### ์ฒซ ์‹œ๋„ + +๋ฒ”์œ„๊ฐ€ ์ •ํ•ด์ ธ ์žˆ๋Š” ์ •์ˆ˜ ๋ฐ์ดํ„ฐ์ด๊ธฐ ๋•Œ๋ฌธ์— conting sort๋ฅผ ๋ณ€ํ˜•ํ•˜์—ฌ ์ •๋ ฌ์„ ํ•˜์˜€๊ณ , ์ •๋ ฌ๋œ ๋ฐฐ์—ด์„ ๊ธฐ๋ฐ˜์œผ๋กœ ์Šฌ๋ผ์ด์‹ฑ์„ ํ•˜์—ฌ set์„ ๋งŒ๋“ค์–ด์„œ ํ’€์ด๋ฅผ ์‹œ๋„ํ–ˆ๋‹ค. 
+ +```python +import sys +input = sys.stdin.readline +t = int(input()) + +for _ in range(t): + n = int(input()) + p_cnt = [0] * (n + 1) + p_cnt2 = [0] * (n + 1) + for _ in range(n): + s1, s2 = map(int, input().split()) + p_cnt[s1] = (s1, s2) + p_cnt2[s2] = (s1, s2) + top1 = p_cnt[1][1] + top2 = p_cnt2[1][0] + + if p_cnt[1][1] == 1: + print(1) + else: + cnt = 0 + set1 = set(p_cnt[1:top2 + 1]) + set2 = set(p_cnt2[1:top1 + 1]) + print(len(set1 & set2)) +``` + +| 3 | 6 | +| 7 | 3 | +| 4 | 2 | +| 1 | 4 | +| 5 | 7 | +| 2 | 5 | +| 6 | 1 | + +์œ„์™€ ๊ฐ™์€ ์˜ˆ์ œ๊ฐ€ ์žˆ๋‹ค๊ณ  ์ƒ๊ฐํ•ด๋ณด์ž. ๋จผ์ € ์ด๋ฅผ ์„ฑ์ ๊ณผ ๋ฉด์ ‘ ์ˆœ์œผ๋กœ ์ •๋ ฌํ•œ ๋ฆฌ์ŠคํŠธ๋ฅผ ๊ฐ๊ฐ ๋งŒ๋“ ๋‹ค. + +| 1 | 4 | | 6 | 1 | +| 2 | 5 | | 4 | 2 | +| 3 | 6 | | 7 | 3 | +| 4 | 2 | | 1 | 4 | +| 5 | 7 | | 2 | 5 | +| 6 | 1 | | 3 | 6 | +| 7 | 3 | | 5 | 7 | + +์—ฌ๊ธฐ์„œ ์ฒซ ๋ฒˆ์งธ ๋ฆฌ์ŠคํŠธ(์„ฑ์  ์ˆœ)๋Š” ๋ฉด์ ‘ ์ˆœ์œ„๊ฐ€ ๊ฐ€์žฅ ๋†’์€ ์‚ฌ๋žŒ์ด ๋‚˜์˜ฌ ๋•Œ๊นŒ์ง€ ์Šฌ๋ผ์ด์‹ฑ์„ ํ•˜์—ฌ set์œผ๋กœ ๋งŒ๋“ค์–ด ์ค€๋‹ค. set(p_cnt[1:top2 + 1]) + +๋ฐ˜๋Œ€๋กœ ๋‘ ๋ฒˆ์งธ ๋ฆฌ์ŠคํŠธ(๋ฉด์ ‘ ์ˆœ)๋Š” ์„ฑ์  ์ˆœ์œ„๊ฐ€ ๊ฐ€์žฅ ๋†’์€ ์‚ฌ๋žŒ์ด ๋‚˜์˜ฌ ๋•Œ๊นŒ์ง€ ์Šฌ๋ผ์ด์‹ฑ์„ ํ•˜์—ฌ set์œผ๋กœ ๋งŒ๋“ค์–ด ์ค€๋‹ค. set2 = set(p_cnt2[1:top1 + 1]) + +์ด๋ ‡๊ฒŒ ๋งŒ๋“ค๋ฉด ๊ฐ set์— ํ•œ ๋ถ„์•ผ์˜ 1 ์ˆœ์œ„์ธ ์‚ฌ๋žŒ๋ณด๋‹ค ๋‹ค๋ฅธ ๋ถ„์•ผ์˜ ์ˆœ์œ„๊ฐ€ ๋†’์€ ์‚ฌ๋žŒ๋“ค์„ ๊ฑธ๋Ÿฌ๋‚ผ ์ˆ˜ ์žˆ๊ณ , ์ด ๋‘˜์˜ ๊ต์ง‘ํ•ฉ์„ ์ด์šฉํ•˜๋ฉด ๋‘ ๋ถ„์•ผ์˜ 1 ์ˆœ์œ„์ธ ์‚ฌ๋žŒ๋“ค๋ณด๋‹ค ์ ์–ด๋„ ํ•˜๋‚˜์˜ ๋ถ„์•ผ์˜ ์ˆœ์œ„๊ฐ€ ๋†’์€ ์‚ฌ๋žŒ๋“ค์„ ์ถ”๋ ค๋‚ผ ์ˆ˜ ์žˆ์–ด์„œ ์ •๋‹ต์„ ๊ตฌํ•  ์ˆ˜ ์žˆ๋‹ค๊ณ  ์ƒ๊ฐํ–ˆ๋‹ค. + +์‹ค์ œ๋กœ๋„ ์œ„ ์˜ˆ์ œ๋กœ ์‹คํ–‰ํ•œ ๊ฒฐ๊ณผ๋Š” 3์œผ๋กœ ์ •์ƒ์ ์œผ๋กœ ํ†ต๊ณผํ•œ๋‹ค. + +ํ•˜์ง€๋งŒ ์•„๋ž˜์˜ ์˜ˆ์ œ์™€ ๊ฐ™์€ ๊ฒฝ์šฐ์—๋Š” ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. + +| 1 | 4 | +| 2 | 2 | +| 3 | 3 | +| 4 | 1 | + +์ด ๊ฒฝ์šฐ์—๋Š” ์œ„์™€ ๊ฐ™์€ ๋ฐฉ๋ฒ•์œผ๋กœ ํ•˜๊ฒŒ ๋˜๋ฉด 4๋ช… ๋ชจ๋‘ set์— ๋“ค์–ด๊ฐ€๊ฒŒ ๋˜๋ฏ€๋กœ ์ถœ๋ ฅ์ด 4๊ฐ€ ๋‚˜์˜ค๊ฒŒ ๋œ๋‹ค. + +ํ•˜์ง€๋งŒ ์‹ค์ œ ์ •๋‹ต์€ (3, 3)์ด (2, 2)์— ๋ง‰ํžˆ๊ฒŒ ๋˜์–ด 3 ์ด ๋‚˜์™€์•ผ ํ•œ๋‹ค. + +### ์ˆ˜์ •ํ•œ ์ฝ”๋“œ + +```python +import sys +input = sys.stdin.readline + +t = int(input()) + +for _ in range(t): + n = int(input()) + p_cnt = [0] * (n + 1) + for _ in range(n): + s1, s2 = map(int, input().split()) + p_cnt[s1] = (s1, s2) + + cnt = 1 + best_s1, best_s2 = p_cnt[1] + for s1, s2 in p_cnt[2:]: + if best_s2 > s2: + cnt += 1 + best_s2 = s2 + print(cnt) +``` + +์„ฑ์  ์ˆœ์œผ๋กœ ์ •๋ ฌํ•œ ๋ฆฌ์ŠคํŠธ ํ•˜๋‚˜๋งŒ ์‚ฌ์šฉํ•œ๋‹ค. ์ด ๋ฆฌ์ŠคํŠธ๋ฅผ ๋Œ๋ฉด์„œ ํ˜„์žฌ ์ €์žฅ๋˜์–ด ์žˆ๋Š” ๋ฉด์ ‘ ์ˆœ์œ„๋ณด๋‹ค ๋†’์€ ์‚ฌ๋žŒ์„ ๋ฐœ๊ฒฌํ•  ๊ฒฝ์šฐ ์นด์šดํŠธ๋ฅผ ํ•˜๋‚˜ ๋Š˜๋ฆฌ๊ณ  ๊ทธ ์‚ฌ๋žŒ์˜ ์ˆœ์œ„๋ฅผ ์ €์žฅํ•˜์—ฌ ๋ฐ˜๋ณต๋ฌธ์„ ๊ณ„์† ์ง„ํ–‰ํ•œ๋‹ค. (์ดˆ๊ธฐ ๊ฐ’์€ ์„ฑ์  1์œ„์˜ ์‚ฌ๋žŒ) + +์ด๋ ‡๊ฒŒ ํ•˜๋ฉด ์„ฑ๊ณต์ ์œผ๋กœ ์–ด๋–ค ์‚ฌ๋žŒ๋ณด๋‹ค ๋‘ ์ˆœ์œ„๊ฐ€ ๋ชจ๋‘ ๋‚ฎ์€ ์‚ฌ๋žŒ์„ ๊ฑธ๋Ÿฌ๋‚ผ ์ˆ˜ ์žˆ๋‹ค. \ No newline at end of file diff --git a/_posts/2022-12-14-boj_1715.markdown b/_posts/2022-12-14-boj_1715.markdown new file mode 100644 index 00000000000..a155680d06a --- /dev/null +++ b/_posts/2022-12-14-boj_1715.markdown @@ -0,0 +1,62 @@ +--- +title: "[BOJ] ์นด๋“œ ์ •๋ ฌํ•˜๊ธฐ - 1715 (G4)" +author: kwon +date: 2022-12-13T23:00:00 +0900 +categories: [boj, gold] +tags: [data structure, greedy algorithm, priority queue] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2์ดˆ | 128 MB | + +## ๋ฌธ์ œ + +์ •๋ ฌ๋œ ๋‘ ๋ฌถ์Œ์˜ ์ˆซ์ž ์นด๋“œ๊ฐ€ ์žˆ๋‹ค๊ณ  ํ•˜์ž. ๊ฐ ๋ฌถ์Œ์˜ ์นด๋“œ์˜ ์ˆ˜๋ฅผ A, B๋ผ ํ•˜๋ฉด ๋ณดํ†ต ๋‘ ๋ฌถ์Œ์„ ํ•ฉ์ณ์„œ ํ•˜๋‚˜๋กœ ๋งŒ๋“œ๋Š” ๋ฐ์—๋Š” A+B ๋ฒˆ์˜ ๋น„๊ต๋ฅผ ํ•ด์•ผ ํ•œ๋‹ค. 
์ด๋ฅผํ…Œ๋ฉด, 20์žฅ์˜ ์ˆซ์ž ์นด๋“œ ๋ฌถ์Œ๊ณผ 30์žฅ์˜ ์ˆซ์ž ์นด๋“œ ๋ฌถ์Œ์„ ํ•ฉ์น˜๋ ค๋ฉด 50๋ฒˆ์˜ ๋น„๊ต๊ฐ€ ํ•„์š”ํ•˜๋‹ค. + +๋งค์šฐ ๋งŽ์€ ์ˆซ์ž ์นด๋“œ ๋ฌถ์Œ์ด ์ฑ…์ƒ ์œ„์— ๋†“์—ฌ ์žˆ๋‹ค. ์ด๋“ค์„ ๋‘ ๋ฌถ์Œ์”ฉ ๊ณจ๋ผ ์„œ๋กœ ํ•ฉ์ณ๋‚˜๊ฐ„๋‹ค๋ฉด, ๊ณ ๋ฅด๋Š” ์ˆœ์„œ์— ๋”ฐ๋ผ์„œ ๋น„๊ต ํšŸ์ˆ˜๊ฐ€ ๋งค์šฐ ๋‹ฌ๋ผ์ง„๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด 10์žฅ, 20์žฅ, 40์žฅ์˜ ๋ฌถ์Œ์ด ์žˆ๋‹ค๋ฉด 10์žฅ๊ณผ 20์žฅ์„ ํ•ฉ์นœ ๋’ค, ํ•ฉ์นœ 30์žฅ ๋ฌถ์Œ๊ณผ 40์žฅ์„ ํ•ฉ์นœ๋‹ค๋ฉด (10 + 20) + (30 + 40) = 100๋ฒˆ์˜ ๋น„๊ต๊ฐ€ ํ•„์š”ํ•˜๋‹ค. ๊ทธ๋Ÿฌ๋‚˜ 10์žฅ๊ณผ 40์žฅ์„ ํ•ฉ์นœ ๋’ค, ํ•ฉ์นœ 50์žฅ ๋ฌถ์Œ๊ณผ 20์žฅ์„ ํ•ฉ์นœ๋‹ค๋ฉด (10 + 40) + (50 + 20) = 120 ๋ฒˆ์˜ ๋น„๊ต๊ฐ€ ํ•„์š”ํ•˜๋ฏ€๋กœ ๋œ ํšจ์œจ์ ์ธ ๋ฐฉ๋ฒ•์ด๋‹ค. + +N๊ฐœ์˜ ์ˆซ์ž ์นด๋“œ ๋ฌถ์Œ์˜ ๊ฐ๊ฐ์˜ ํฌ๊ธฐ๊ฐ€ ์ฃผ์–ด์งˆ ๋•Œ, ์ตœ์†Œํ•œ ๋ช‡ ๋ฒˆ์˜ ๋น„๊ต๊ฐ€ ํ•„์š”ํ•œ์ง€๋ฅผ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +## ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— N์ด ์ฃผ์–ด์ง„๋‹ค. (1 โ‰ค N โ‰ค 100,000) ์ด์–ด์„œ N๊ฐœ์˜ ์ค„์— ๊ฑธ์ณ ์ˆซ์ž ์นด๋“œ ๋ฌถ์Œ์˜ ๊ฐ๊ฐ์˜ ํฌ๊ธฐ๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ์ˆซ์ž ์นด๋“œ ๋ฌถ์Œ์˜ ํฌ๊ธฐ๋Š” 1,000๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™์€ ์–‘์˜ ์ •์ˆ˜์ด๋‹ค. + +## ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— ์ตœ์†Œ ๋น„๊ต ํšŸ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +## ํ’€์ด + +๊ฐ€์žฅ ์ž‘์€ ์นด๋“œ ๋ญ‰์น˜๋ฅผ ์‹œ์ž‘์œผ๋กœ ๋งค๋ฒˆ ๊ฐ€์žฅ ์ž‘์€ ์นด๋“œ ๋ญ‰์น˜๋ฅผ ํ•ฉ์น˜๋ฉด ์ตœ์†Œ๋กœ ๋น„๊ตํ•˜๋ฉด์„œ ๋ชจ๋“  ์นด๋“œ๋ฅผ ํ•ฉ์น  ์ˆ˜ ์žˆ๋‹ค. ์™œ๋ƒํ•˜๋ฉด ์ด๋ ‡๊ฒŒ ํ•ด์•ผ๋งŒ ํ•ฉ์ณ์ง„ ์นด๋“œ ๋ญ‰์น˜์˜ ํฌ๊ธฐ๊ฐ€ ์ตœ์†Œ์ด๊ณ , ๋งค๋ฒˆ ํ•ฉ์ณ์ง„ ์นด๋“œ ๋ญ‰์น˜๊ฐ€ ์ตœ์†Œ์—ฌ์•ผ ๋น„๊ตํ•˜๋Š” ํšŸ์ˆ˜๋„ ์ตœ์†Œ์ด๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +๋งค๋ฒˆ ์ •๋ ฌ๋œ ์ˆ˜์—ด์ด ํ•„์š”ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์‚ฝ์ž… ์‹œ์— ์ •๋ ฌ์„ ํ•ด์น˜์ง€ ์•Š๋Š” ์šฐ์„ ์ˆœ์œ„ ํ๋ฅผ ์ด์šฉํ•˜์—ฌ ํ’€์—ˆ๋‹ค. + +```python +from queue import PriorityQueue + +n = int(input()) +pq = PriorityQueue() + +for _ in range(n): + num = int(input()) + pq.put(num) + +result = 0 + +while pq.qsize() > 1: + tmp = pq.get() + num = pq.get() + result += tmp + num + pq.put(tmp + num) + +print(result) +``` + +์šฐ์„ ์ˆœ์œ„ ํ์— ์ž…๋ ฅ์œผ๋กœ ๋“ค์–ด์˜จ ์นด๋“œ ๋ญ‰์น˜์˜ ํฌ๊ธฐ๋ฅผ ์‚ฝ์ž…ํ•˜๊ณ  ๋ฐ˜๋ณต๋ฌธ์„ ์‹œ์ž‘ํ•œ๋‹ค. + +์šฐ์„ ์ˆœ์œ„ ํ์˜ ์ฒซ ๋ฒˆ์งธ, ๋‘ ๋ฒˆ์งธ ์›์†Œ๋ฅผ ๋นผ๋‚ด์„œ ๋”ํ•ด์ค€๋‹ค (์นด๋“œ ๋ญ‰์น˜๋ฅผ ํ•ฉ์นจ). ๊ทธ๋ฆฌ๊ณ  ์ด๋ฅผ ๊ฒฐ๊ณผ๊ฐ€ ์ €์žฅ๋  result ํ•จ์ˆ˜์— ์ €์žฅํ•ด์ค€๋‹ค (ํ•ฉ์น  ๋•Œ ๋น„๊ตํ•œ ํšŸ์ˆ˜๋ฅผ ๋ฐ˜์˜). ๋งˆ์ง€๋ง‰์œผ๋กœ ์ฒซ ๋ฒˆ์งธ์™€ ๋‘ ๋ฒˆ์งธ ์›์†Œ์˜ ํ•ฉ์„ ๋‹ค์‹œ ์šฐ์„ ์ˆœ์œ„ ํ์— ๋„ฃ์–ด์ค€๋‹ค. ๋‹ค์‹œ ๋„ฃ์–ด์ฃผ๋ฉด ํ•ฉ์ณ์ง„ ์นด๋“œ ๋ญ‰์น˜๋ฅผ ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ ๋‹ค์‹œ ํ•ฉ์น  ์ˆ˜ ์žˆ๋‹ค. + +์ด๋ฅผ ์šฐ์„ ์ˆœ์œ„ ํ์˜ ๊ธธ์ด๊ฐ€ 1๋ณด๋‹ค ์ž‘์•„์งˆ ๋•Œ๊นŒ์ง€ ๋ฐ˜๋ณตํ•œ๋‹ค. ํ•˜๋‚˜๋งŒ ๋‚จ์•˜๋‹ค๋Š” ๊ฒƒ์ด ๋งˆ์ง€๋ง‰ ๋ง์…ˆ์ด ๋๋‚ฌ๋‹ค๋Š” ๋œป์ด๊ธฐ ๋•Œ๋ฌธ์— ํ๊ฐ€ ๋ชจ๋‘ ๋นŒ ๋•Œ๊นŒ์ง€ ๋ฐ˜๋ณตํ•  ํ•„์š” ์—†๋‹ค. \ No newline at end of file diff --git a/_posts/2022-12-16-boj_2217.markdown b/_posts/2022-12-16-boj_2217.markdown new file mode 100644 index 00000000000..4cd8b2b4357 --- /dev/null +++ b/_posts/2022-12-16-boj_2217.markdown @@ -0,0 +1,52 @@ +--- +title: "[BOJ] ๋กœํ”„ - 2217 (S4)" +author: kwon +date: 2022-12-16T23:00:00 +0900 +categories: [boj, silver] +tags: [math, greedy algorithm, sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2์ดˆ | 192 MB | + +## ๋ฌธ์ œ + +N(1 โ‰ค N โ‰ค 100,000)๊ฐœ์˜ ๋กœํ”„๊ฐ€ ์žˆ๋‹ค. ์ด ๋กœํ”„๋ฅผ ์ด์šฉํ•˜์—ฌ ์ด๋Ÿฐ ์ €๋Ÿฐ ๋ฌผ์ฒด๋ฅผ ๋“ค์–ด์˜ฌ๋ฆด ์ˆ˜ ์žˆ๋‹ค. ๊ฐ๊ฐ์˜ ๋กœํ”„๋Š” ๊ทธ ๊ตต๊ธฐ๋‚˜ ๊ธธ์ด๊ฐ€ ๋‹ค๋ฅด๊ธฐ ๋•Œ๋ฌธ์— ๋“ค ์ˆ˜ ์žˆ๋Š” ๋ฌผ์ฒด์˜ ์ค‘๋Ÿ‰์ด ์„œ๋กœ ๋‹ค๋ฅผ ์ˆ˜๋„ ์žˆ๋‹ค. + +ํ•˜์ง€๋งŒ ์—ฌ๋Ÿฌ ๊ฐœ์˜ ๋กœํ”„๋ฅผ ๋ณ‘๋ ฌ๋กœ ์—ฐ๊ฒฐํ•˜๋ฉด ๊ฐ๊ฐ์˜ ๋กœํ”„์— ๊ฑธ๋ฆฌ๋Š” ์ค‘๋Ÿ‰์„ ๋‚˜๋ˆŒ ์ˆ˜ ์žˆ๋‹ค. 
k๊ฐœ์˜ ๋กœํ”„๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ค‘๋Ÿ‰์ด w์ธ ๋ฌผ์ฒด๋ฅผ ๋“ค์–ด์˜ฌ๋ฆด ๋•Œ, ๊ฐ๊ฐ์˜ ๋กœํ”„์—๋Š” ๋ชจ๋‘ ๊ณ ๋ฅด๊ฒŒ w/k ๋งŒํผ์˜ ์ค‘๋Ÿ‰์ด ๊ฑธ๋ฆฌ๊ฒŒ ๋œ๋‹ค. + +๊ฐ ๋กœํ”„๋“ค์— ๋Œ€ํ•œ ์ •๋ณด๊ฐ€ ์ฃผ์–ด์กŒ์„ ๋•Œ, ์ด ๋กœํ”„๋“ค์„ ์ด์šฉํ•˜์—ฌ ๋“ค์–ด์˜ฌ๋ฆด ์ˆ˜ ์žˆ๋Š” ๋ฌผ์ฒด์˜ ์ตœ๋Œ€ ์ค‘๋Ÿ‰์„ ๊ตฌํ•ด๋‚ด๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. ๋ชจ๋“  ๋กœํ”„๋ฅผ ์‚ฌ์šฉํ•ด์•ผ ํ•  ํ•„์š”๋Š” ์—†์œผ๋ฉฐ, ์ž„์˜๋กœ ๋ช‡ ๊ฐœ์˜ ๋กœํ”„๋ฅผ ๊ณจ๋ผ์„œ ์‚ฌ์šฉํ•ด๋„ ๋œ๋‹ค. + +## ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์ •์ˆ˜ N์ด ์ฃผ์–ด์ง„๋‹ค. ๋‹ค์Œ N๊ฐœ์˜ ์ค„์—๋Š” ๊ฐ ๋กœํ”„๊ฐ€ ๋ฒ„ํ‹ธ ์ˆ˜ ์žˆ๋Š” ์ตœ๋Œ€ ์ค‘๋Ÿ‰์ด ์ฃผ์–ด์ง„๋‹ค. ์ด ๊ฐ’์€ 10,000์„ ๋„˜์ง€ ์•Š๋Š” ์ž์—ฐ์ˆ˜์ด๋‹ค. + +## ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— ๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. + +## ํ’€์ด + +๋‚ด๋ฆผ์ฐจ์ˆœ์œผ๋กœ ์ •๋ ฌํ•œ ๋‹ค์Œ ์ˆœ์„œ๋Œ€๋กœ ๋“ค ์ˆ˜ ์žˆ๋Š” ๋ฌด๊ฒŒ๋ฅผ ๋น„๊ตํ•œ๋‹ค. + +```python +n = int(input()) +result = 0 +cnt = 0 +rope = [] +for i in range(0, n): + rope.append(int(input())) + +rope.sort(reverse=True) +max_w = 0 +for cnt, r in enumerate(rope): + if max_w <= r * (cnt + 1): + max_w = r * (cnt + 1) + +print(max_w) +``` + +๋ฐ˜๋ณต๋ฌธ์„ ๋Œ๋ฉด์„œ (๋กœํ”„์˜ ๊ฐœ์ˆ˜) * (์ง€๊ธˆ ํ™•์ธํ•œ ๋กœํ”„์˜ ๋ฌด๊ฒŒ)์˜ ๊ฐ’์ด ์ €์žฅํ•ด ๋†จ๋˜ ๋ฌด๊ฒŒ `max_w` ๋ณด๋‹ค ํฌ๋ฉด `max_w`๋ฅผ ๋ฐ”๊ฟ”์ค€๋‹ค. \ No newline at end of file diff --git a/_posts/2022-12-3-boj_1016.markdown b/_posts/2022-12-3-boj_1016.markdown new file mode 100644 index 00000000000..7511ba79bc9 --- /dev/null +++ b/_posts/2022-12-3-boj_1016.markdown @@ -0,0 +1,79 @@ +--- +title: "[BOJ] ์ œ๊ณฑ ใ„ดใ„ด์ˆ˜ - 1016 (G1)" +author: Kwon +date: 2022-12-03T23:00:00 +0900 +categories: [boj, gold] +tags: [prime number, sieve of Eratosthenes] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +|:-----------:|:-----------:| +| 2์ดˆ | 512 MB | + +## ๋ฌธ์ œ +์–ด๋–ค ์ •์ˆ˜ X๊ฐ€ 1๋ณด๋‹ค ํฐ ์ œ๊ณฑ์ˆ˜๋กœ ๋‚˜๋ˆ„์–ด ๋–จ์–ด์ง€์ง€ ์•Š์„ ๋•Œ, ๊ทธ ์ˆ˜๋ฅผ ์ œ๊ณฑใ„ดใ„ด์ˆ˜๋ผ๊ณ  ํ•œ๋‹ค. ์ œ๊ณฑ์ˆ˜๋Š” ์ •์ˆ˜์˜ ์ œ๊ณฑ์ด๋‹ค. min๊ณผ max๊ฐ€ ์ฃผ์–ด์ง€๋ฉด, min๋ณด๋‹ค ํฌ๊ฑฐ๋‚˜ ๊ฐ™๊ณ , max๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™์€ ์ œ๊ณฑใ„ดใ„ด์ˆ˜๊ฐ€ ๋ช‡ ๊ฐœ ์žˆ๋Š”์ง€ ์ถœ๋ ฅํ•œ๋‹ค. + +## ์ž…๋ ฅ +์ฒซ์งธ ์ค„์— ๋‘ ์ •์ˆ˜ min๊ณผ max๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +## ์ถœ๋ ฅ +์ฒซ์งธ ์ค„์— ๋‘ ์ •์ˆ˜ min๊ณผ max๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +## ์ œํ•œ +* 1 โ‰ค min โ‰ค 1,000,000,000,000 +* min โ‰ค max โ‰ค min + 1,000,000 + +## ํ’€์ด +2๋ถ€ํ„ฐ ์ฆ๊ฐ€ํ•˜๋ฉด์„œ ์ œ๊ณฑ ์ˆ˜์˜ ๋ฐฐ์ˆ˜๋“ค์„ ๋นผ์ฃผ๋Š” ์‹์œผ๋กœ ์ œ๊ณฑ ใ„ดใ„ด ์ˆ˜๋ฅผ ์ฐพ์•„๊ฐ€๋Š” ๋ฐฉ์‹์œผ๋กœ ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•˜๋ ค ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์‹œ๋„ํ•˜์˜€๋‹ค. + +### ์ฒซ ์‹œ๋„์˜ ๋ฌธ์ œ์  +```python +n, m = map(int, input().split()) +sqr = [False] * (m - n + 1) +cnt = 0 +for i in range(2, int(m ** 0.5) + 1): + j = 1 + sqr_num = i ** 2 + while sqr_num * j <= m: + if sqr_num * j < n: pass + elif not sqr[sqr_num * j - n]: + sqr[sqr_num * j - n] = True + cnt += 1 + j += 1 +print((m - n + 1) - cnt) +``` + +์†Œ์ˆ˜์˜ ๋ฐฐ์ˆ˜๋ฅผ ๋นผ๋ฉฐ ์†Œ์ˆ˜๋ฅผ ์ฐพ์•„๊ฐ€๋Š” ์—๋ผํ† ์Šคํ…Œ๋„ค์Šค์˜ ์ฒด(sieve of Eratosthenes) ์•Œ๊ณ ๋ฆฌ์ฆ˜์„ ์ œ๊ณฑ ใ„ดใ„ด ์ˆ˜์— ๋งž๊ฒŒ ๋ณ€ํ˜•ํ•˜์—ฌ ์‚ฌ์šฉํ•˜์˜€๋‹ค. + +ํ•˜์ง€๋งŒ ์œ„ ์ฝ”๋“œ๋Š” ์ฒด์ถœ์‹œ ์‹œ๊ฐ„ ์ดˆ๊ณผ๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. ๋ฌธ์ œ์—์„œ ์ฃผ์–ด์ง„ ์ˆ˜์˜ ๋ฒ”์œ„๋ฅผ ๋ณด๋ฉด ์ตœ์†Œ๊ฐ€ 1,000,001,000,000(= 1,000,000,000,000 + 1,000,000)๊นŒ์ง€ ์ฃผ์–ด์งˆ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— `j`๊ฐ€ 1๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋ฉด ์—„์ฒญ๋‚œ ์‹œ๊ฐ„ ๋‚ญ๋น„๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. + +n์ด๋ผ๋Š” ์ตœ์†Œ๊ฐ’์ด ์กด์žฌํ•˜๊ธฐ ๋•Œ๋ฌธ์— `sqr_num * j`๊ฐ€ `n`๋ถ€ํ„ฐ ๋ฐ”๋กœ ์‹œ์ž‘ํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•ด์ฃผ๋ฉด ์“ธ๋ชจ์—†๋Š” ์—ฐ์‚ฐ์˜ ๋‚ญ๋น„๋ฅผ ์ค„์ผ ์ˆ˜ ์žˆ๋‹ค. 
+ +```python +# ์‹คํŒจ +j = 1 + +# ์„ฑ๊ณต +j = n // sqr +``` + +๊ทธ๋ž˜์„œ ์œ„์™€ ๊ฐ™์ด `j`๋ฅผ `n // sqr`๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋„๋ก ํ•˜์—ฌ ์ฝ”๋“œ๋ฅผ ์ˆ˜์ •ํ•˜์˜€๋‹ค. + +### ์ˆ˜์ •ํ•œ ์ฝ”๋“œ +```python +n, m = map(int, input().split()) +cnt = 0 +prime = [1] * (m - n + 1) +i = 2 +while i ** 2 <= m: + sqr = i ** 2 + j = n // sqr + while sqr * j <= m: + if sqr * j >= n and prime[sqr * j - n] == 1: + prime[sqr * j - n] = 0 + j += 1 + i += 1 +print(sum(prime)) +``` \ No newline at end of file diff --git a/_posts/2022-12-9-boj_13460.markdown b/_posts/2022-12-9-boj_13460.markdown new file mode 100644 index 00000000000..08d1031a04a --- /dev/null +++ b/_posts/2022-12-9-boj_13460.markdown @@ -0,0 +1,281 @@ +--- +title: "[BOJ] ๊ตฌ์Šฌ ํƒˆ์ถœ 2 - 13460 (G1)" +author: kwon +date: 2022-12-9T23:00:00 +0900 +categories: [boj, gold] +tags: [implementation, graph, bfs, simulation] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +|:-----------:|:-----------:| +| 2์ดˆ | 512 MB | + +## ๋ฌธ์ œ +์Šคํƒ€ํŠธ๋งํฌ์—์„œ ํŒ๋งคํ•˜๋Š” ์–ด๋ฆฐ์ด์šฉ ์žฅ๋‚œ๊ฐ ์ค‘์—์„œ ๊ฐ€์žฅ ์ธ๊ธฐ๊ฐ€ ๋งŽ์€ ์ œํ’ˆ์€ ๊ตฌ์Šฌ ํƒˆ์ถœ์ด๋‹ค. ๊ตฌ์Šฌ ํƒˆ์ถœ์€ ์ง์‚ฌ๊ฐํ˜• ๋ณด๋“œ์— ๋นจ๊ฐ„ ๊ตฌ์Šฌ๊ณผ ํŒŒ๋ž€ ๊ตฌ์Šฌ์„ ํ•˜๋‚˜์”ฉ ๋„ฃ์€ ๋‹ค์Œ, ๋นจ๊ฐ„ ๊ตฌ์Šฌ์„ ๊ตฌ๋ฉ์„ ํ†ตํ•ด ๋นผ๋‚ด๋Š” ๊ฒŒ์ž„์ด๋‹ค. + +๋ณด๋“œ์˜ ์„ธ๋กœ ํฌ๊ธฐ๋Š” N, ๊ฐ€๋กœ ํฌ๊ธฐ๋Š” M์ด๊ณ , ํŽธ์˜์ƒ 1ร—1ํฌ๊ธฐ์˜ ์นธ์œผ๋กœ ๋‚˜๋ˆ„์–ด์ ธ ์žˆ๋‹ค. ๊ฐ€์žฅ ๋ฐ”๊นฅ ํ–‰๊ณผ ์—ด์€ ๋ชจ๋‘ ๋ง‰ํ˜€์ ธ ์žˆ๊ณ , ๋ณด๋“œ์—๋Š” ๊ตฌ๋ฉ์ด ํ•˜๋‚˜ ์žˆ๋‹ค. ๋นจ๊ฐ„ ๊ตฌ์Šฌ๊ณผ ํŒŒ๋ž€ ๊ตฌ์Šฌ์˜ ํฌ๊ธฐ๋Š” ๋ณด๋“œ์—์„œ 1ร—1ํฌ๊ธฐ์˜ ์นธ์„ ๊ฐ€๋“ ์ฑ„์šฐ๋Š” ์‚ฌ์ด์ฆˆ์ด๊ณ , ๊ฐ๊ฐ ํ•˜๋‚˜์”ฉ ๋“ค์–ด๊ฐ€ ์žˆ๋‹ค. ๊ฒŒ์ž„์˜ ๋ชฉํ‘œ๋Š” ๋นจ๊ฐ„ ๊ตฌ์Šฌ์„ ๊ตฌ๋ฉ์„ ํ†ตํ•ด์„œ ๋นผ๋‚ด๋Š” ๊ฒƒ์ด๋‹ค. ์ด๋•Œ, **ํŒŒ๋ž€ ๊ตฌ์Šฌ์ด ๊ตฌ๋ฉ์— ๋“ค์–ด๊ฐ€๋ฉด ์•ˆ ๋œ๋‹ค**. + +์ด๋•Œ, ๊ตฌ์Šฌ์„ ์†์œผ๋กœ ๊ฑด๋“œ๋ฆด ์ˆ˜๋Š” ์—†๊ณ , ์ค‘๋ ฅ์„ ์ด์šฉํ•ด์„œ ์ด๋ฆฌ ์ €๋ฆฌ ๊ตด๋ ค์•ผ ํ•œ๋‹ค. ์™ผ์ชฝ์œผ๋กœ ๊ธฐ์šธ์ด๊ธฐ, ์˜ค๋ฅธ์ชฝ์œผ๋กœ ๊ธฐ์šธ์ด๊ธฐ, ์œ„์ชฝ์œผ๋กœ ๊ธฐ์šธ์ด๊ธฐ, ์•„๋ž˜์ชฝ์œผ๋กœ ๊ธฐ์šธ์ด๊ธฐ์™€ ๊ฐ™์€ ๋„ค ๊ฐ€์ง€ ๋™์ž‘์ด ๊ฐ€๋Šฅํ•˜๋‹ค. + +๊ฐ๊ฐ์˜ ๋™์ž‘์—์„œ ๊ณต์€ ๋™์‹œ์— ์›€์ง์ธ๋‹ค. ๋นจ๊ฐ„ ๊ตฌ์Šฌ์ด ๊ตฌ๋ฉ์— ๋น ์ง€๋ฉด ์„ฑ๊ณต์ด์ง€๋งŒ, ํŒŒ๋ž€ ๊ตฌ์Šฌ์ด ๊ตฌ๋ฉ์— ๋น ์ง€๋ฉด ์‹คํŒจ์ด๋‹ค. **๋นจ๊ฐ„ ๊ตฌ์Šฌ๊ณผ ํŒŒ๋ž€ ๊ตฌ์Šฌ์ด ๋™์‹œ์— ๊ตฌ๋ฉ์— ๋น ์ ธ๋„ ์‹คํŒจ์ด๋‹ค**. ๋นจ๊ฐ„ ๊ตฌ์Šฌ๊ณผ ํŒŒ๋ž€ ๊ตฌ์Šฌ์€ ๋™์‹œ์— ๊ฐ™์€ ์นธ์— ์žˆ์„ ์ˆ˜ ์—†๋‹ค. ๋˜, ๋นจ๊ฐ„ ๊ตฌ์Šฌ๊ณผ ํŒŒ๋ž€ ๊ตฌ์Šฌ์˜ ํฌ๊ธฐ๋Š” ํ•œ ์นธ์„ ๋ชจ๋‘ ์ฐจ์ง€ํ•œ๋‹ค. ๊ธฐ์šธ์ด๋Š” ๋™์ž‘์„ ๊ทธ๋งŒํ•˜๋Š” ๊ฒƒ์€ ๋” ์ด์ƒ ๊ตฌ์Šฌ์ด ์›€์ง์ด์ง€ ์•Š์„ ๋•Œ ๊นŒ์ง€์ด๋‹ค. + +๋ณด๋“œ์˜ ์ƒํƒœ๊ฐ€ ์ฃผ์–ด์กŒ์„ ๋•Œ, ์ตœ์†Œ ๋ช‡ ๋ฒˆ ๋งŒ์— ๋นจ๊ฐ„ ๊ตฌ์Šฌ์„ ๊ตฌ๋ฉ์„ ํ†ตํ•ด ๋นผ๋‚ผ ์ˆ˜ ์žˆ๋Š”์ง€ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +## ์ž…๋ ฅ +์ฒซ ๋ฒˆ์งธ ์ค„์—๋Š” ๋ณด๋“œ์˜ ์„ธ๋กœ, ๊ฐ€๋กœ ํฌ๊ธฐ๋ฅผ ์˜๋ฏธํ•˜๋Š” ๋‘ ์ •์ˆ˜ N, M (3 โ‰ค N, M โ‰ค 10)์ด ์ฃผ์–ด์ง„๋‹ค. ๋‹ค์Œ N๊ฐœ์˜ ์ค„์— ๋ณด๋“œ์˜ ๋ชจ์–‘์„ ๋‚˜ํƒ€๋‚ด๋Š” ๊ธธ์ด M์˜ ๋ฌธ์ž์—ด์ด ์ฃผ์–ด์ง„๋‹ค. ์ด ๋ฌธ์ž์—ด์€ '.', '#', 'O', 'R', 'B' ๋กœ ์ด๋ฃจ์–ด์ ธ ์žˆ๋‹ค. '.'์€ ๋นˆ ์นธ์„ ์˜๋ฏธํ•˜๊ณ , '#'์€ ๊ณต์ด ์ด๋™ํ•  ์ˆ˜ ์—†๋Š” ์žฅ์• ๋ฌผ ๋˜๋Š” ๋ฒฝ์„ ์˜๋ฏธํ•˜๋ฉฐ, 'O'๋Š” ๊ตฌ๋ฉ์˜ ์œ„์น˜๋ฅผ ์˜๋ฏธํ•œ๋‹ค. 'R'์€ ๋นจ๊ฐ„ ๊ตฌ์Šฌ์˜ ์œ„์น˜, 'B'๋Š” ํŒŒ๋ž€ ๊ตฌ์Šฌ์˜ ์œ„์น˜์ด๋‹ค. + +์ž…๋ ฅ๋˜๋Š” ๋ชจ๋“  ๋ณด๋“œ์˜ ๊ฐ€์žฅ์ž๋ฆฌ์—๋Š” ๋ชจ๋‘ '#'์ด ์žˆ๋‹ค. ๊ตฌ๋ฉ์˜ ๊ฐœ์ˆ˜๋Š” ํ•œ ๊ฐœ ์ด๋ฉฐ, ๋นจ๊ฐ„ ๊ตฌ์Šฌ๊ณผ ํŒŒ๋ž€ ๊ตฌ์Šฌ์€ ํ•ญ์ƒ 1๊ฐœ๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +## ์ถœ๋ ฅ +์ตœ์†Œ ๋ช‡ ๋ฒˆ ๋งŒ์— ๋นจ๊ฐ„ ๊ตฌ์Šฌ์„ ๊ตฌ๋ฉ์„ ํ†ตํ•ด ๋นผ๋‚ผ ์ˆ˜ ์žˆ๋Š”์ง€ ์ถœ๋ ฅํ•œ๋‹ค. 
๋งŒ์•ฝ, 10๋ฒˆ ์ดํ•˜๋กœ ์›€์ง์—ฌ์„œ ๋นจ๊ฐ„ ๊ตฌ์Šฌ์„ ๊ตฌ๋ฉ์„ ํ†ตํ•ด ๋นผ๋‚ผ ์ˆ˜ ์—†์œผ๋ฉด -1์„ ์ถœ๋ ฅํ•œ๋‹ค. + +## ํ’€์ด + +๋งต์„ BFS๋กœ ํƒ์ƒ‰ํ•˜๋ฉด์„œ ์‹œ๋ฎฌ๋ ˆ์ด์…˜ ํ•˜๋Š” ๋ฌธ์ œ์ด๋‹ค. + +์ฒ˜์Œ ์‹œ๋„ํ•œ ๋ฐฉ์‹์€ ์ •๋ง ๋‹จ์ˆœํ•˜๊ฒŒ ๋ชจ๋‘ ๋‹ค ๊ตฌํ˜„ํ•˜๋Š” ๊ฒƒ์œผ๋กœ ๋งต ๋ฆฌ์ŠคํŠธ ์ž์ฒด์—์„œ ๊ตฌ์Šฌ๋“ค์„ ์‹ค์ œ๋กœ ์›€์ง์ด๋Š” ๊ฒƒ๊นŒ์ง€ ๊ตฌํ˜„ํ•˜์˜€๋‹ค. + +```python +# ํ…Œ๋‘๋ฆฌ์˜ '#'์€ ํ•„์š”์—†์„ ๊ฒƒ์ด๋ผ ํŒ๋‹จํ•˜๊ณ  ๋นผ๊ณ  ์ž…๋ ฅ ๋ฐ›์Œ +n, m = map(int, input().split()) +o, r, b = (-1, ), (-1, ), (-1, ) +input() +map_list = [] +for i in range(n - 2): + row = input()[1:-1] + if o[0] == -1: + o = (row.find('O'), i) + if r[0] == -1: + r = (row.find('R'), i) + if b[0] == -1: + b = (row.find('B'), i) + + map_list.append(list(row)) +input() + +# ๊ธฐ์šธ์˜€์„ ๋•Œ ๊ตฌ์Šฌ์„ ์›€์ง์ด๋Š” ํ•จ์ˆ˜ +def tilt(map_list, dir, r, b): + rx, ry = r + bx, by = b + dir_x, dir_y = dir + + next_rx, next_ry = rx + dir_x, ry + dir_y # ํ•œ ์นธ ์ด๋™ + next_bx, next_by = bx + dir_x, by + dir_y + move_r = 0 <= next_ry < n - 2 and 0 <= next_rx < m - 2 + move_b = 0 <= next_by < n - 2 and 0 <= next_bx < m - 2 + + while move_r or move_b: + + if move_r: + next_r = map_list[next_ry][next_rx] + if next_r != '#' and not ((next_rx, next_ry) == (bx, by) and not move_b) and (rx, ry) != o: + rx = next_rx + ry = next_ry + elif (rx, ry) != o: + move_r = False + elif move_r: + move_r = False + + if move_r: + next_rx, next_ry = rx + dir_x, ry + dir_y # ํ•œ ์นธ ์ด๋™ + move_r = 0 <= next_ry < n - 2 and 0 <= next_rx < m - 2 + + if move_b: + next_b = map_list[next_by][next_bx] + if next_b != '#' and not((next_bx, next_by) == (rx, ry) and not move_r): + if next_b == 'O': + return -1, -1, -1, -1 + bx = next_bx + by = next_by + else: + move_b = False + elif move_b: + move_b = False + + if move_b: + next_bx, next_by = bx + dir_x, by + dir_y + move_b = 0 <= next_by < n - 2 and 0 <= next_bx < m - 2 + + return rx, ry, bx, by + +# ์›€์ง์ธ ์ขŒํ‘œ๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ ์‹ค์ œ ๋งต ๋ฆฌ์ŠคํŠธ์— ๊ตฌ์Šฌ์„ ํ‘œ์‹œํ•˜๋Š” ํ•จ์ˆ˜ +def move(dir, r, b): + m_rx, m_ry, m_bx, m_by = tilt(map_list, dir, r, b) + if m_rx == -1: return (-1, -1) # + elif (m_rx, m_ry) == o: return (True, True) + else: + map_list[r[1]][r[0]], map_list[m_ry][m_rx] = map_list[m_ry][m_rx], map_list[r[1]][r[0]] + map_list[b[1]][b[0]], map_list[m_by][m_bx] = map_list[m_by][m_bx], map_list[b[1]][b[0]] + r, b = (m_rx, m_ry), (m_bx, m_by) + return r, b + +visited = [[0] * (m - 2) for _ in range(n - 2)] + +# bfs๋ฅผ ํ†ตํ•ด ์‹œ๋ฎฌ๋ ˆ์ด์…˜ ํ•˜๋Š” ํ•จ์ˆ˜ +def sim(r, b): + q = [] + x, y = r + visited[y][x] = 1 + q.append((r, None)) + + while q: + cur = q.pop(0) + dirs = [(1, 0),(-1, 0),(0, 1),(0, -1)] + pre_d = cur[1] + cur = cur[0] + pre_cnt = visited[cur[1]][cur[0]] + if pre_d: + dirs.remove((-pre_d[0], -pre_d[1])) + for d in dirs: + if not(0 < r[1] + d[1] < n -2 or 0 < r[0] + d[0] < m - 2): continue + r_tmp, b_tmp = move(d, cur, b) + if (r_tmp, b_tmp) == (-1, -1): continue + if r_tmp == True: + return pre_cnt + elif pre_cnt >= 10: + return -1 + elif visited[r_tmp[1]][r_tmp[0]] == 0: + r, b = r_tmp, b_tmp + q.append((r, d)) + visited[r[1]][r[0]] = pre_cnt + 1 + return -1 + +print(sim(r, b)) +``` + +### ์ฒซ ์‹œ๋„์˜ ๋ฌธ์ œ์  + +1. **map_list์— ์‹ค์ œ๋กœ ๊ตฌ์Šฌ์˜ ์œ„์น˜๋ฅผ ๋ฐ˜์˜ํ•  ํ•„์š”๊ฐ€ ์—†๋‹ค.** + 1. ๊ตฌ์Šฌ ๋‘ ๊ฐœ์˜ ์ขŒํ‘œ๋งŒ ํ™•์‹คํžˆ ์•Œ๊ณ  ์žˆ๋‹ค๋ฉด ๋ชจ๋“  ์ƒํ˜ธ์ž‘์šฉ์„ ํ‘œํ˜„ํ•  ์ˆ˜ ์žˆ๋‹ค. + 2. ํ•˜๋‚˜์˜ ๋ฆฌ์ŠคํŠธ์— ์‹ค์ œ๋กœ ๊ตฌ์Šฌ์˜ ์œ„์น˜๋ฅผ ๋ฐ˜์˜ํ•˜๊ฒŒ ๋˜๋ฉด bfs๋ฅผ ํ†ตํ•ด ์‹œ๋ฎฌ๋ ˆ์ด์…˜์„ ํ•˜๋Š” ๋ฐ์— ๋ฌธ์ œ๊ฐ€ ์ƒ๊ธด๋‹ค. 
+ * ์ด์ „๊นŒ์ง€ ์›€์ง์ธ ํšŸ์ˆ˜๊ฐ€ ๊ฐ™์€ (= ํƒ์ƒ‰ ์ง„ํ–‰ ํšŸ์ˆ˜๊ฐ€ ๊ฐ™์€) ๋‹ค๋ฅธ ์‹œ๋„๋ฅผ ํ•  ๋•Œ ์ด์ „ ์‹œํ–‰์ด ์ด๋ฏธ ์ง„ํ–‰๋œ ๋งต์ด๊ธฐ ๋•Œ๋ฌธ์— ์ •์ƒ์ ์ธ ์‹œํ–‰์ด ์ง„ํ–‰๋  ์ˆ˜ ์—†๋‹ค. + +2. **visited ๋ฆฌ์ŠคํŠธ์˜ ์ฐจ์›์ด ๋นจ๊ฐ„ ๊ตฌ์Šฌ์˜ ์ขŒํ‘œ๋งŒ ์ฒดํฌํ•œ๋‹ค.** + 1. ๋นจ๊ฐ„ ๊ตฌ์Šฌ์˜ ์ขŒํ‘œ๊ฐ€ ๊ฐ™์•„๋„ ํŒŒ๋ž€ ๊ตฌ์Šฌ์˜ ์œ„์น˜๊ฐ€ ๋‹ฌ๋ผ ๋‹ค๋ฅธ ์ƒํ™ฉ์ผ ์ˆ˜ ์žˆ๋‹ค. + +3. **์ด์™ธ ๋‹ค๋ฅธ ๋ฌธ์ œ์ ** + 1. ๊ณต์ด ๋‹ค๋ฅธ ๊ณต์— ๋‹ฟ์•„ ๋ฉˆ์ถ”๋Š” ๊ฒฝ์šฐ๋ฅผ ์ง„ํ–‰ํ•˜๋Š” ๋„์ค‘์— ๊ณต์„ ๋งŒ๋‚˜๊ณ  ๊ทธ ๊ณต์ด ์ด์ „์— ์›€์ง์ด์ง€ ์•Š์•˜์„ ๊ฒฝ์šฐ๋ฅผ ๊ธฐ์ค€์œผ๋กœ ํŒ๋‹จํ•˜๋ ค ํ–ˆ์ง€๋งŒ ์ด๊ฒƒ์ด ๋ชจ๋“  ๊ฒฝ์šฐ๋ฅผ ๋‹ค ์ปค๋ฒ„ํ•  ์ˆ˜ ์žˆ๋Š”์ง€๋Š” ์˜๋ฌธ์ด๋‹ค. + 2. ์ž…๋ ฅ์„ ๋ฐ›์„ ๋•Œ ํ…Œ๋‘๋ฆฌ๋ฅผ ์ œ๊ฑฐํ•ด๋ฒ„๋ฆฌ๋‹ˆ ๊ณต๋“ค์ด ๋ฉˆ์ถ”๋Š” ์กฐ๊ฑด์ด ์ถ”๊ฐ€๋˜์–ด์•ผ ํ–ˆ๋‹ค. + * ํ…Œ๋‘๋ฆฌ๋ฅผ ๋‚จ๊ฒจ๋‘”๋‹ค๋ฉด ๋ฒฝ๊ณผ ๊ฐ™์€ ๋ฐฉ์‹์œผ๋กœ ์ฒ˜๋ฆฌํ•  ์ˆ˜ ์žˆ์–ด ์ฝ”๋“œ ์ž‘์„ฑ์— ํŽธ๋ฆฌ + * ํ…Œ๋‘๋ฆฌ๋Š” (m-2) * (n-2) ๊ฐœ์˜ ์›์†Œ๋งŒ์ด ์ถ”๊ฐ€๋˜๋ฉฐ n, m โ‰ค 10์ด๋ฏ€๋กœ ๊ณต๊ฐ„ ๋ณต์žก๋„์— ํฐ ์˜ํ–ฅ์„ ๋ฏธ์น˜์ง€ ์•Š์„ ๊ฒƒ์ด๋ผ ํŒ๋‹จํ•˜์˜€์Œ. + +์œ„ ์ฝ”๋“œ๋Š” ๊ฒฐ๊ณผ์ ์œผ๋กœ ์‹œ๊ฐ„ ์ดˆ๊ณผ๋กœ ํŒ๋‹จ๋˜์—ˆ์ง€๋งŒ ์ฑ„์ ์ด ๊ณ„์† ์ง„ํ–‰๋˜์—ˆ์–ด๋„ ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•  ๊ฐ€๋Šฅ์„ฑ์ด ์ถฉ๋ถ„ํžˆ ์žˆ์–ด ๋ณด์˜€๋‹ค + +### ์ˆ˜์ •ํ•œ ์ฝ”๋“œ + +```python +# ํ…Œ๋‘๋ฆฌ ํฌํ•จํ•˜์—ฌ ์ž…๋ ฅ ๋ฐ›์Œ +n, m = map(int, input().split()) +o, r, b = (-1, ), (-1, ), (-1, ) +map_list = [] +for i in range(n): + row = input() + if o[0] == -1: + o = (row.find('O'), i) + if r[0] == -1: + r = (row.find('R'), i) + if b[0] == -1: + b = (row.find('B'), i) + + map_list.append(list(row)) + +# ์ž…๋ ฅ์ด ๋ฐ”๋€œ์— ๋”ฐ๋ผ ๊ณต์ด ๋ฉˆ์ถ”๋Š” ์กฐ๊ฑด์ด ๋” ๊ฐ„๋‹จํ•ด์ง +# ๊ณต์ด ๊ณต์— ๋‹ฟ์•„ ๋ฉˆ์ถ”๋Š” ๊ฒฝ์šฐ๋„ ๋‹ค๋ฅธ ๋ฐฉ์‹์œผ๋กœ ์ฒ˜๋ฆฌ +def tilt(map_list, dir, r, b): + rx, ry = r + bx, by = b + dir_x, dir_y = dir + if dir_x != 0: + if dir_x * rx > dir_x * bx: late = 'b' + else: late = 'r' + else: + if dir_y * ry > dir_y * by: late = 'b' + else: late = 'r' + + cur_r = map_list[ry][rx] + cur_b = map_list[by][bx] + + next_r, next_b = '.', '.' + + while (next_r != '#' and cur_r != 'O') or (next_b != '#'): + next_r = map_list[ry + dir_y][rx + dir_x] + next_b = map_list[by + dir_y][bx + dir_x] + if next_r != '#' and cur_r != 'O': + cur_r = next_r + rx += dir_x + ry += dir_y + if next_b != '#': + cur_b = next_b + bx += dir_x + by += dir_y + if cur_b == 'O': + return -1, -1, -1, -1 + + if (rx, ry) == (bx, by): + if late == 'b': + bx -= dir_x + by -= dir_y + else: + rx -= dir_x + ry -= dir_y + + return rx, ry, bx, by + +def move(dir, r, b): + m_rx, m_ry, m_bx, m_by = tilt(map_list, dir, r, b) + if m_rx == -1: return (-1, -1) + elif (m_rx, m_ry) == o: return (True, True) + else: + r, b = (m_rx, m_ry), (m_bx, m_by) + return r, b + +# visited๊ฐ€ ๋นจ๊ฐ„ ๊ณต๊ณผ ํŒŒ๋ž€ ๊ณต์˜ ์ขŒํ‘œ๋ฅผ ๋ชจ๋‘ ์ €์žฅํ•  ์ˆ˜ ์žˆ๊ฒŒ ์ˆ˜์ • +visited = [[[[0] * m for _ in range(n)] for _ in range(m)] for _ in range(n)] + +def sim(r, b): + q = [] + q.append((r, b, None)) + + while q: + cur_r, cur_b, pre_d = q.pop(0) + dirs = [(1, 0),(-1, 0),(0, 1),(0, -1)] + pre_cnt = visited[cur_r[1]][cur_r[0]][cur_b[1]][cur_b[0]] + + if pre_d: + dirs.remove(pre_d) + dirs.remove((-pre_d[0], -pre_d[1])) + + for d in dirs: + r_tmp, b_tmp = move(d, cur_r, cur_b) + if r_tmp == -1: + continue + elif r_tmp == True: + return pre_cnt + 1 + elif pre_cnt >= 10: + return -1 + elif not visited[r_tmp[1]][r_tmp[0]][b_tmp[1]][b_tmp[0]]: # ๋ฐฉ๋ฌธ ์•ˆํ•จ + q.append((r_tmp, b_tmp, d)) + visited[r_tmp[1]][r_tmp[0]][b_tmp[1]][b_tmp[0]] = pre_cnt + 1 + + return -1 + +print(sim(r, b)) +``` + +1. 
**map_list์— ์‹ค์ œ๋กœ ๊ตฌ์Šฌ์˜ ์œ„์น˜๋ฅผ ๋ฐ˜์˜ํ•  ํ•„์š”๊ฐ€ ์—†๋‹ค.** + โ†’ ์‹ค์ œ๋กœ ๋ฐ˜์˜ํ•˜๋Š” ์ฝ”๋“œ๋ฅผ ์‚ญ์ œํ•˜๊ณ  ์ขŒํ‘œ๋งŒ ๋ฐ”๊พธ๋Š” ์‹์œผ๋กœ ํ•จ์ˆ˜๋ฅผ ์ˆ˜์ • + +2. **visited ๋ฆฌ์ŠคํŠธ์˜ ์ฐจ์›์ด ๋นจ๊ฐ„ ๊ตฌ์Šฌ์˜ ์ขŒํ‘œ๋งŒ ์ฒดํฌํ•œ๋‹ค.** + โ†’ 2์ฐจ์› ๋ฆฌ์ŠคํŠธ๋ฅผ 4์ฐจ์› ๋ฆฌ์ŠคํŠธ๋กœ ๋ฐ”๊ฟ”์„œ ๋‘˜ ๋‹ค ์ฒดํฌํ•  ์ˆ˜ ์žˆ๋„๋ก ์ˆ˜์ • + +3. **์ด์™ธ ๋‹ค๋ฅธ ๋ฌธ์ œ์ ** + 1. ๊ณต์ด ๋‹ค๋ฅธ ๊ณต์— ๋‹ฟ์•„ ๋ฉˆ์ถ”๋Š” ๊ฒฝ์šฐ๋ฅผ ์ง„ํ–‰ํ•˜๋Š” ๋„์ค‘์— ๊ณต์„ ๋งŒ๋‚˜๊ณ  ๊ทธ ๊ณต์ด ์ด์ „์— ์›€์ง์ด์ง€ ์•Š์•˜์„ ๊ฒฝ์šฐ๋ฅผ ๊ธฐ์ค€์œผ๋กœ ํŒ๋‹จํ•˜๋ ค ํ–ˆ๋‹ค. + โ†’ ํ•จ์ˆ˜์˜ ์ž…๋ ฅ์œผ๋กœ ๋ฐ›์€ ๋งค๊ฐœ๋ณ€์ˆ˜๋“ค์„ ๊ธฐ๋ฐ˜์œผ๋กœ ๋‘ ๊ตฌ์Šฌ์˜ ์ตœ์ข… ์ขŒํ‘œ๊ฐ€ ๊ฒน์น  ๊ฒฝ์šฐ ๋‚˜์ค‘์— ๋„์ฐฉํ•œ ๊ตฌ์Šฌ์˜ ์ขŒํ‘œ๋ฅผ ํ•œ ์นธ ์ „์œผ๋กœ ๋Œ๋ฆฌ๋Š” ๋ฐฉ์‹์œผ๋กœ ์ˆ˜์ • + + 2. ์ž…๋ ฅ์„ ๋ฐ›์„ ๋•Œ ํ…Œ๋‘๋ฆฌ๋ฅผ ์ œ๊ฑฐํ•ด๋ฒ„๋ฆฌ๋‹ˆ ๊ณต๋“ค์ด ๋ฉˆ์ถ”๋Š” ์กฐ๊ฑด์ด ์ถ”๊ฐ€๋˜์–ด์•ผ ํ–ˆ๋‹ค. + โ†’ ๊ทธ๋ƒฅ ํ…Œ๋‘๋ฆฌ๊นŒ์ง€ ๋‹ค ๋ฐ›์•˜๋‹ค. + + +์ฒ˜์Œ๋ถ€ํ„ฐ ๋„ˆ๋ฌด ๊ณต๊ฐ„ ๋ณต์žก๋„๋ฅผ ์ค„์ด๋ ค๊ณ  ํ•ด์„œ ํ‘ธ๋Š”๋ฐ ๋” ์˜ค๋ž˜ ๊ฑธ๋ฆฐ ๊ฒƒ ๊ฐ™๋‹ค. ๋ฌด์ž‘์ • ๊ณต๊ฐ„ ๋ณต์žก๋„๋ฅผ ์ค„์ด๋ ค๊ณ  ํ•˜์ง€ ๋ง๊ณ  ์ž…๋ ฅ์˜ ํฌ๊ธฐ์™€ ์ฃผ์–ด์ง„ ๋ฌธ์ œ์˜ ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ์„ ์ž˜ ํ™•์ธํ•˜๊ณ  ์ ์ ˆํ•œ ์„ ์„ ์ฐพ์•„์„œ ํ•˜๋Š” ์Šต๊ด€์ด ํ•„์š”ํ•ด ๋ณด์ธ๋‹ค. \ No newline at end of file diff --git a/_posts/2023-01-03-boj_25378.markdown b/_posts/2023-01-03-boj_25378.markdown new file mode 100644 index 00000000000..b486bd118a8 --- /dev/null +++ b/_posts/2023-01-03-boj_25378.markdown @@ -0,0 +1,173 @@ +--- +title: "[BOJ][KOI] ์กฐ์•ฝ๋Œ - 25378 (G1)" +author: kwon +date: 2022-12-16T23:00:00 +0900 +categories: [boj, gold] +tags: [math, greedy algorithm, sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 0.5 ์ดˆ | 1024 MB | + +# ๋ฌธ์ œ + +์ขŒ์šฐ ํ•œ ์ค„๋กœ ์žˆ๋Š” N๊ฐœ์˜ ์žฅ์†Œ ๊ฐ๊ฐ์— ์กฐ์•ฝ๋Œ์ด ๋ช‡ ๊ฐœ์”ฉ ๋†“์—ฌ ์žˆ๋‹ค. + +์ฒ ์ˆ˜๊ฐ€ ํ•  ์ˆ˜ ์žˆ๋Š” ์ž‘์—…์˜ ์ข…๋ฅ˜๋Š” ์•„๋ž˜ ๋‘ ๊ฐ€์ง€์ด๋‹ค. + +1. ์ธ์ ‘ํ•œ ๋‘ ์žฅ์†Œ์—์„œย **์ž„์˜์˜ ๋™์ผํ•œ ๊ฐœ์ˆ˜**์˜ ์กฐ์•ฝ๋Œ์„ ๊ฐ€์ ธ๊ฐ€๊ธฐ +2. ํ•œ ์žฅ์†Œ์—์„œย **์ž„์˜์˜ ๊ฐœ์ˆ˜**์˜ ์กฐ์•ฝ๋Œ์„ ๊ฐ€์ ธ๊ฐ€๊ธฐ + +์–ด๋–ค ์žฅ์†Œ์— ์กฐ์•ฝ๋Œ์ด ๋” ์ด์ƒ ์—†๋Š” ๊ฒฝ์šฐ์—๋„ ๊ทธ ์žฅ์†Œ๋Š” ๊ทธ๋Œ€๋กœ ๋‚จ์•„ ์žˆ์–ด์„œ, ์ดˆ๊ธฐ์— ์ธ์ ‘ํ•˜์ง€ ์•Š์•˜๋˜ ๋‘ ์žฅ์†Œ๊ฐ€ ์ธ์ ‘ํ•œ ๊ฒƒ์œผ๋กœ ๋ฐ”๋€Œ์ง€ ์•Š๋Š”๋‹ค. + +์ฒ ์ˆ˜๋Š” ์œ„์˜ ๋‘ ์ž‘์—… ์ค‘ ํ•˜๋‚˜๋ฅผ ๊ณจ๋ผ์„œ ์‹คํ–‰ํ•˜๋Š” ๊ฒƒ์„ ๋ฐ˜๋ณตํ•˜์—ฌ ๋ชจ๋“  ์กฐ์•ฝ๋Œ์„ ๊ฐ€์ ธ๊ฐ€๋ ค๊ณ  ํ•œ๋‹ค. + +์ดˆ๊ธฐ์— ๊ฐ ์žฅ์†Œ์— ์žˆ๋Š” ์กฐ์•ฝ๋Œ๋“ค์˜ ๊ฐœ์ˆ˜๋ฅผ ์ž…๋ ฅ๋ฐ›์•„, ์ฒ ์ˆ˜๊ฐ€ ํ•  ์ˆ˜ ์žˆ๋Š” ์ตœ์†Œ์˜ ์ž‘์—… ํšŸ์ˆ˜๋ฅผ ๊ณ„์‚ฐํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜๋ผ. + +# ์ž…๋ ฅ + +์ฒซ ๋ฒˆ์งธ ์ค„์— ์žฅ์†Œ์˜ ๊ฐœ์ˆ˜ N์ด ์ฃผ์–ด์ง„๋‹ค. + +๋‘ ๋ฒˆ์งธ ์ค„์— N๊ฐœ์˜ ์žฅ์†Œ ๊ฐ๊ฐ์— ์žˆ๋Š” ์กฐ์•ฝ๋Œ ๊ฐœ์ˆ˜๊ฐ€ ์™ผ์ชฝ ์žฅ์†Œ์— ํ•ด๋‹นํ•˜๋Š” ๊ฒƒ๋ถ€ํ„ฐ ์ˆœ์„œ๋Œ€๋กœ ๊ณต๋ฐฑ ํ•˜๋‚˜์”ฉ์„ ์‚ฌ์ด๋กœ ๋‘๊ณ  ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ ๋ฒˆ์งธ ์ค„์— ๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. + +# ์ œํ•œ + +- 2 โ‰ค N โ‰ค 2 500 +- ๊ฐ ์žฅ์†Œ์˜ ์ดˆ๊ธฐ ์กฐ์•ฝ๋Œ ๊ฐœ์ˆ˜๋Š” $1$ ์ด์ƒ $10^8$ย ์ดํ•˜์ด๋‹ค. + + +# ์„œ๋ธŒํƒœ์Šคํฌ + +| ๋ฒˆํ˜ธ | ๋ฐฐ์  | ์ œํ•œ | +| --- | --- | --- | +| 1 | 6 | N = 3. | +| 2 | 11 | N โ‰ค 15. | +| 3 | 19 | N โ‰ค 300. | +| 4 | 27 | ๊ฐ ์žฅ์†Œ์˜ ์ดˆ๊ธฐ ์กฐ์•ฝ๋Œ ๊ฐœ์ˆ˜๊ฐ€ 2 500 ์ดํ•˜์ด๋‹ค. | +| 5 | 37 | ์ถ”๊ฐ€ ์ œ์•ฝ ์กฐ๊ฑด ์—†์Œ. | + +# ํ’€์ด + +dynamic programing์œผ๋กœ ์ ‘๊ทผํ•ด ๋ณด๋ฉด ์กฐ์•ฝ๋Œ์˜ ์˜ค๋ฅธ์ชฝ์—์„œ ์ž‘์—…์„ ์‹œ์ž‘ํ•˜์—ฌ ๊ฐ ์ž๋ฆฌ๋งˆ๋‹ค ๊ทธ ๋•Œ์˜ ์ตœ์†Œ ์ž‘์—… ํšŸ์ˆ˜๋ฅผ ์ €์žฅํ•˜์—ฌ memoizationํ•  ์ˆ˜ ์žˆ๋‹ค. 
+ +ํ•˜์ง€๋งŒ ๋‹จ์ˆœํžˆ ์ด์ „๊นŒ์ง€์˜ ์ตœ์†Œ ์ž‘์—… ํšŸ์ˆ˜์— ํ˜„์žฌ ์ž๋ฆฌ์˜ ์ตœ์†Œ ์ž‘์—…์„ ๋”ํ•œ๋‹ค๊ณ  ์ตœ์ข…์ ์ธ ์ตœ์†Œ ์ž‘์—… ํšŸ์ˆ˜๊ฐ€ ๋  ์ˆ˜๋Š” ์—†๋‹ค. ์•„๋ž˜์™€ ๊ฐ™์€ ์˜ˆ๋ฅผ ๋ณด์ž. + +> **1 3 2** + +์กฐ์•ฝ๋Œ์˜ ๊ฐœ์ˆ˜๊ฐ€ ์œ„์™€ ๊ฐ™์ด ์ฃผ์–ด์กŒ์„ ๊ฒฝ์šฐ ๋‘ ๋ฒˆ์งธ ์ž๋ฆฌ๊นŒ์ง€ ์ตœ์†Œ ํšŸ์ˆ˜๋Š” 2์ด๋‹ค. ์ด๋ฅผ ์ €์žฅํ•˜๊ณ  ๋‹ค์Œ์œผ๋กœ ๋„˜์–ด๊ฐ€์„œ ๋งˆ์ง€๋ง‰ ์ž๋ฆฌ์ธ ๋‘ ๊ฐœ์˜ ์กฐ์•ฝ๋Œ์„ ๋นผ๋‚ด๋Š” ํšŸ์ˆ˜ 1์„ ๋‹จ์ˆœํžˆ ๋”ํ•˜๋ฉด ์ด ํšŸ์ˆ˜๋Š” 3ํšŒ์ด๋‹ค. + +ํ•˜์ง€๋งŒ ์ฒ˜์Œ๋ถ€ํ„ฐ 1๋ฒˆ ์ž‘์—…์œผ๋กœ ๋ชจ๋‘ ์ง„ํ–‰ํ•˜๊ฒŒ ๋˜๋ฉด ๋‘ ๋ฒˆ ๋งŒ์— ๋๋‚ผ ์ˆ˜ ์žˆ๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ๋‹จ์ˆœํžˆ ์ด์ „๊นŒ์ง€ ์ตœ์†Œ ํšŸ์ˆ˜์— ํ˜„์žฌ์˜ ์ตœ์†Œ ํšŸ์ˆ˜๋ฅผ ๋”ํ•˜๋ฉด ์•ˆ๋œ๋‹ค. + +1๋ฒˆ ์ž‘์—…๊ณผ 2๋ฒˆ ์ž‘์—… ๊ฐ„์˜ ์ž‘์—… ํšŸ์ˆ˜ ์ฐจ์ด๋ฅผ ์กฐ๊ธˆ ๋” ์ž์„ธํžˆ ์•Œ์•„๋ณด์ž. + +**1๋ฒˆ ์ž‘์—…** + +- r ๋ฒˆ์งธ์—์„œ l ๋ฒˆ์งธ๊นŒ์ง€ 1๋ฒˆ ์ž‘์—… ๋งŒ์œผ๋กœ ๋ชจ๋“  ์ž๋ฆฌ์˜ ๋Œ์˜ ๊ฐœ์ˆ˜๋ฅผ 0์œผ๋กœ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค๊ณ  ํ–ˆ์„ ๋•Œ ์ž‘์—… ํšŸ์ˆ˜๋Š” (l - r) ๋ฒˆ์ด๋‹ค. (r, r + 1), (r + 1, r + 2), โ€ฆ , (l - 2, l - 1), (l - 1, l) ์ด์™€ ๊ฐ™์€ ์Œ์œผ๋กœ ์ž‘์—…์ด ์ง„ํ–‰๋˜๋ฏ€๋กœ (l -r)๋ฒˆ ๋งŒ์— ์ž‘์—…์„ ๋๋‚ผ ์ˆ˜ ์žˆ๋‹ค. + +**2๋ฒˆ ์ž‘์—…** + +- ์ด ๊ฒฝ์šฐ๋Š” ํ•ด๋‹น ๋ฒ”์œ„์˜ ๋ชจ๋“  ์ž๋ฆฌ์˜ ์กฐ์•ฝ๋Œ์„ ํ•œ ๋ฒˆ์”ฉ ๊ฐ€์ ธ๊ฐ€๋Š” ๊ฒƒ์ด๋ฏ€๋กœ ๋ฒ”์œ„ ๋‚ด์˜ ์ž๋ฆฌ ๊ฐœ์ˆ˜์ธ (l - r + 1)๊ณผ ๊ฐ™์€ ํšŸ์ˆ˜๋งŒํผ ์ž‘์—…์„ ํ•ด์•ผ ํ•œ๋‹ค. + +์ž๋ฆฌ์˜ ๋ฒ”์œ„์— ๋”ฐ๋ผ 1๋ฒˆ ์ž‘์—…์œผ๋กœ๋งŒ ์กฐ์•ฝ๋Œ์„ ๋ชจ๋‘ ๋น„์šธ ์ˆ˜ ์žˆ๋Š”์ง€ ์—ฌ๋ถ€๊ฐ€ ์™„์ „ํžˆ ๋‹ฌ๋ผ์ง€๊ธฐ ๋•Œ๋ฌธ์— ์ด ์ •๋ณด๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋„๋ก ๋ฏธ๋ฆฌ ๋ชจ๋“  ๋ฒ”์œ„์— ๋Œ€ํ•ด 1๋ฒˆ ์ž‘์—…์œผ๋กœ๋งŒ ๊ฐ€๋Šฅํ•œ์ง€ ์ฒดํฌํ•ด๋‘”๋‹ค. + +์œ„์—์„œ ๋ณผ ์ˆ˜ ์žˆ๋“ฏ์ด 1๋ฒˆ ์ž‘์—…์„ ์ตœ์ ์œผ๋กœ ์ด์šฉํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ์ฐพ๋Š” ๊ฒƒ์ด ์ด ๋ฌธ์ œ๋ฅผ ํ‘ธ๋Š”๋ฐ ์ค‘์š”ํ•˜๋‹ค๋Š” ๊ฒƒ์„ ์•Œ ์ˆ˜ ์žˆ๋‹ค. + +์ฒดํฌํ•˜๊ธฐ ๊ฐ€์žฅ ์‰ฌ์šด ๋ฐฉ๋ฒ•์€ ๋งค๋ฒˆ ์ธ์ ‘ํ•œ ๋‘ ์กฐ์•ฝ๋Œ์— ๋Œ€ํ•ด 1๋ฒˆ ์ž‘์—…์„ ํ•˜๋ฉด์„œ ๋งค๋ฒˆ ํ•ด๋‹น ๋ฒ”์œ„์˜ ์กฐ์•ฝ๋Œ ์ˆ˜์˜ ํ•ฉ์„ ๊ตฌํ•ด ํ•ฉ์ด 0์ด ๋˜์—ˆ๋Š”์ง€ ํ™•์ธํ•ด๋ณด๋Š” ๊ฒƒ์ด์ง€๋งŒ, ์ด ๊ฒฝ์šฐ $O(n^3)$์˜ ์‹œ๊ฐ„๋ณต์žก๋„๋ฅผ ๊ฐ€์ง€๊ธฐ ๋•Œ๋ฌธ์—, ์ด ๋ฌธ์ œ์˜ ๋ชจ๋“  ์„œ๋ธŒํƒœ์Šคํฌ๋ฅผ ๋งŒ์กฑํ•˜๊ธฐ์— ๋ถ€์ ํ•ฉํ•˜๋‹ค. + +๊ทธ๋ž˜์„œ ์•„๋ž˜์™€ ๊ฐ™์€ ๋ฐฉ๋ฒ•์„ ์‚ฌ์šฉํ•œ๋‹ค. + +```python +check = [[0] * n for _ in range(n)] + +for l in range(n - 1): + tmp = nums[l] + + for r in range(l + 1, n): + if tmp == nums[r]: + check[l][r] = 1 + elif tmp > nums[r]: + break + tmp = abs(tmp - nums[r]) +``` + +์ž„์‹œ ๋ณ€์ˆ˜(`tmp`)์— 1๋ฒˆ ์ž‘์—…์„ ํ•˜๊ณ  ๋‚จ์€ ์ด์ „ ์กฐ์•ฝ๋Œ์˜ ๊ฐœ์ˆ˜๋ฅผ ์ €์žฅํ•œ๋‹ค. ์ž„์‹œ ๋ณ€์ˆ˜์™€ ํ˜„์žฌ ํ™•์ธํ•˜๊ณ  ์žˆ๋Š” ์ž๋ฆฌ์˜ ์กฐ์•ฝ๋Œ ๊ฐœ์ˆ˜(`nums[r]`)๊ฐ€ ๊ฐ™์œผ๋ฉด 1๋ฒˆ ์ž‘์—…๋งŒ์œผ๋กœ ํ•ด๋‹น ๋ฒ”์œ„์˜ ์ž๋ฆฌ๋ฅผ ๋ชจ๋‘ ๋น„์šธ ์ˆ˜ ์žˆ๋‹ค๋Š” ๊ฒƒ์ด๋ฏ€๋กœ ์ฒดํฌํ•ด์ค€๋‹ค. + +๊ฐ™์ง€ ์•Š์„ ๊ฒฝ์šฐ ๊ทธ๋Œ€๋กœ ๋†”๋‘๋ฉด ๋˜์ง€๋งŒ ๋งŒ์•ฝ ์ž„์‹œ ๋ณ€์ˆ˜๊ฐ€ ํ˜„์žฌ ์กฐ์•ฝ๋Œ ๊ฐœ์ˆ˜๋ณด๋‹ค ํฌ๋‹ค๋ฉด ์–ด๋–ป๊ฒŒ ํ•ด๋„ 1๋ฒˆ ์ž‘์—…๋งŒ ํ•ด์„œ ๋ชจ๋‘ ๋‹ค ๋น„์šธ ์ˆ˜ ์—†๊ธฐ ๋•Œ๋ฌธ์— ๋ฐ”๋กœ ๋ฐ˜๋ณต๋ฌธ์„ ์ข…๋ฃŒํ•œ๋‹ค. + +์œ„ ๊ฒฝ์šฐ์—๋„ ํ•ด๋‹น๋˜์ง€ ์•Š์•˜์œผ๋ฉด ์•„์ง 1๋ฒˆ ์ž‘์—…๋งŒ์œผ๋กœ ์กฐ์•ฝ๋Œ์„ ๋‹ค ๋น„์šธ ์ˆ˜ ์žˆ๋Š” ๊ฐ€๋Šฅ์„ฑ์ด ๋‚จ์•„์žˆ๊ธฐ ๋•Œ๋ฌธ์— ์ž„์‹œ ๋ณ€์ˆ˜์— ์ž„์‹œ ๋ณ€์ˆ˜์™€ ํ˜„์žฌ ์กฐ์•ฝ๋Œ ๊ฐœ์ˆ˜์˜ ์ฐจ์ด๋กœ ์ž„์‹œ ๋ณ€์ˆ˜์˜ ๊ฐ’์„ ๋ฐ”๊ฟ”์ค€๋‹ค. ๊ณ„์†ํ•ด์„œ ๋‘ ๊ฐ’์˜ ์ฐจ๋กœ ๊ฐ’์„ ๋ฐ”๊ฟ”์ค˜์•ผ 1๋ฒˆ ์ž‘์—…์„ ํ•˜๊ณ  ๋‚จ์€ ์ด์ „ ์กฐ์•ฝ๋Œ์˜ ๊ฐœ์ˆ˜๊ฐ€ ๋œ๋‹ค. + +์ด์ œ ์œ„์—์„œ ์ฒดํฌํ•œ ๊ฒƒ์„ ์ด์šฉํ•˜์—ฌ ์ตœ์†Œ ์‹œํ–‰์„ ์ฐพ์•„์•ผ ํ•œ๋‹ค. ์ตœ์†Œ ํšŸ์ˆ˜์˜ ํ›„๋ณด๊ฐ€ ๋  ์ˆ˜ ์žˆ๋Š” ๊ฐ’์€ ํฌ๊ฒŒ ๋‘ ๊ฐ€์ง€๊ฐ€ ์žˆ๋‹ค. 
+ +$memo[i]$๋Š” $i$๊นŒ์ง€ ์ž‘์—…์„ ์ˆ˜ํ–‰ํ–ˆ์„ ๋•Œ์˜ ์ตœ์†Œ ํšŸ์ˆ˜, $2\leq i\leq N$ ๋ฒ”์œ„์—์„œ ๋ฐ˜๋ณตํ•˜๋ฉฐ ํ™•์ธํ•  ๊ฒƒ์ด๋ฏ€๋กœ $memo[1]=1$๋กœ ์ดˆ๊ธฐํ™”ํ•œ๋‹ค๊ณ  ํ•˜์ž. ์ด๋•Œ ์ตœ์†Œ ํšŸ์ˆ˜์˜ ํ›„๋ณด๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +1. $memo[i-1] + 1$ + + : ์ด์ „๊นŒ์ง€ ์ตœ์†Œ ํšŸ์ˆ˜์— ์ด๋ฒˆ ์ž๋ฆฌ์˜ ์กฐ์•ฝ๋Œ์„ 2๋ฒˆ ์ž‘์—…์œผ๋กœ ๋นผ๋‚ด๋Š” ํšŸ์ˆ˜(1)๋ฅผ ๋”ํ•œ ๊ฒƒ + +2. $\min_{1\leq j < i, check[j][i]=1}(memo[j - 1]+i-j)$ $(2\leq i\leq N)$ + + : ๋ฒ”์œ„๊ฐ€ ๋ฐ”๋€Œ์—ˆ์œผ๋‹ˆ ์ƒˆ๋กญ๊ฒŒ 1๋ฒˆ ์ž‘์—…๋งŒ์œผ๋กœ ๋น„์šฐ๊ธฐ๊ฐ€ ๊ฐ€๋Šฅํ•œ ๋ฒ”์œ„(์ฒดํฌ๋œ ๋ฒ”์œ„)๊ฐ€ ์žˆ๋Š”์ง€ ์ฐพ์•„๋ณด๋Š” ๊ณผ์ •์ด๋‹ค. $1\leq j < i$๋ฅผ ๋งŒ์กฑํ•˜๋Š” $j$๋ถ€ํ„ฐ ์‹œ์ž‘ํ•ด์„œ ์ƒˆ๋กœ์šด ๋ฒ”์œ„์˜ ๋($i$)๊นŒ์ง€ ํ™•์ธํ•˜๋ฉด์„œ ์ฒดํฌํ•œ ๋ฒ”์œ„๊ฐ€ ์žˆ๋Š”์ง€ ํ™•์ธํ•˜๊ณ  ์ฒดํฌ๋œ ๊ฒƒ์ด ํ™•์ธ๋˜๋ฉด ($check[j][i]=1$) ํšŸ์ˆ˜๋ฅผ ๊ณ„์‚ฐํ•œ๋‹ค. + + ์ƒˆ๋กญ๊ฒŒ ์ฐพ์€ ๋ฒ”์œ„๊ฐ€ ์‹œ์ž‘ํ•˜๊ธฐ ์ „๊นŒ์ง€์˜ ์ตœ์†Œ ํšŸ์ˆ˜($memo[j - 1]$)์™€ ์ƒˆ๋กœ์šด ๋ฒ”์œ„์—์„œ 1๋ฒˆ ์ž‘์—…์„ ํ•œ ํšŸ์ˆ˜($i-j$)๋ฅผ ๋”ํ•ด์ฃผ๋ฉด ์ „์ฒด ํšŸ์ˆ˜๋ฅผ ๊ณ„์‚ฐํ•  ์ˆ˜ ์žˆ๋‹ค. + + +์œ„ ๊ณผ์ •์œผ๋กœ ๊ตฌํ•œ ๋‘ ๊ฐ’ ์ค‘ ์ž‘์€ ๊ฒƒ์„ ํ•ด๋‹น ์œ„์น˜์—์„œ์˜ ์ตœ์†Œ ํšŸ์ˆ˜๋กœ ์ทจํ•˜๋ฉด ๋œ๋‹ค. ์ „์ฒด ์ ํ™”์‹์€ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ํ‘œํ˜„ํ•  ์ˆ˜ ์žˆ๋‹ค. + +$$memo[i]=\min\{memo[i-1]+1, \min_{1\leq j < i, check[j][i]=1}(memo[j - 1]+i-j)\}$$ + +```python +memo = [0] * (n + 1) +memo[1] = 1 + +for i in range(2, n + 1): + new_min = float('inf') + for j in range(1, i): + if check[j - 1][i - 1] == 1: + new = memo[j - 1] + i - j + if new_min > new: + new_min = new + + memo[i] = min(memo[i - 1] + 1, new_min) +print(memo[-1]) +``` + +## ์ „์ฒด ์ฝ”๋“œ + +```python +n = int(input()) +nums = list(map(int, input().split())) + +check = [[0] * n for _ in range(n)] +memo = [0] * (n + 1) +memo[1] = 1 + +for l in range(n - 1): + tmp = nums[l] + + for r in range(l + 1, n): + if tmp == nums[r]: + check[l][r] = 1 + elif tmp > nums[r]: + break + tmp = abs(tmp - nums[r]) + +for i in range(2, n + 1): + new_min = float('inf') + for j in range(1, i): + if check[j - 1][i - 1] == 1: + new = memo[j - 1] + i - j + if new_min > new: + new_min = new + + memo[i] = min(memo[i - 1] + 1, new_min) +print(memo[-1]) +``` \ No newline at end of file diff --git a/_posts/2023-02-02-boj_1003.markdown b/_posts/2023-02-02-boj_1003.markdown new file mode 100644 index 00000000000..1c5648002c9 --- /dev/null +++ b/_posts/2023-02-02-boj_1003.markdown @@ -0,0 +1,90 @@ +--- +title: "[BOJ]ํ”ผ๋ณด๋‚˜์น˜ ํ•จ์ˆ˜- 1003 (S3)" +author: kwon +date: 2023-02-02T23:00:00 +0900 +categories: [boj, silver] +tags: [dynamic programing] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 0.25 ์ดˆ (์ถ”๊ฐ€ ์‹œ๊ฐ„ ์—†์Œ) | 128 MB | + +# ๋ฌธ์ œ + +๋‹ค์Œ ์†Œ์Šค๋Š” N๋ฒˆ์งธ ํ”ผ๋ณด๋‚˜์น˜ ์ˆ˜๋ฅผ ๊ตฌํ•˜๋Š” C++ ํ•จ์ˆ˜์ด๋‹ค. + +``` +int fibonacci(int n) { + if (n == 0) { + printf("0"); + return 0; + } else if (n == 1) { + printf("1"); + return 1; + } else { + return fibonacci(nโ€1) + fibonacci(nโ€2); + } +} + +``` + +`fibonacci(3)`์„ ํ˜ธ์ถœํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์ผ์ด ์ผ์–ด๋‚œ๋‹ค. + +- `fibonacci(3)`์€ย `fibonacci(2)`์™€ย `fibonacci(1)`ย (์ฒซ ๋ฒˆ์งธ ํ˜ธ์ถœ)์„ ํ˜ธ์ถœํ•œ๋‹ค. +- `fibonacci(2)`๋Š”ย `fibonacci(1)`ย (๋‘ ๋ฒˆ์งธ ํ˜ธ์ถœ)๊ณผย `fibonacci(0)`์„ ํ˜ธ์ถœํ•œ๋‹ค. +- ๋‘ ๋ฒˆ์งธ ํ˜ธ์ถœํ•œย `fibonacci(1)`์€ 1์„ ์ถœ๋ ฅํ•˜๊ณ  1์„ ๋ฆฌํ„ดํ•œ๋‹ค. +- `fibonacci(0)`์€ 0์„ ์ถœ๋ ฅํ•˜๊ณ , 0์„ ๋ฆฌํ„ดํ•œ๋‹ค. +- `fibonacci(2)`๋Š”ย `fibonacci(1)`๊ณผย `fibonacci(0)`์˜ ๊ฒฐ๊ณผ๋ฅผ ์–ป๊ณ , 1์„ ๋ฆฌํ„ดํ•œ๋‹ค. +- ์ฒซ ๋ฒˆ์งธ ํ˜ธ์ถœํ•œย `fibonacci(1)`์€ 1์„ ์ถœ๋ ฅํ•˜๊ณ , 1์„ ๋ฆฌํ„ดํ•œ๋‹ค. 
+- `fibonacci(3)`์€ย `fibonacci(2)`์™€ย `fibonacci(1)`์˜ ๊ฒฐ๊ณผ๋ฅผ ์–ป๊ณ , 2๋ฅผ ๋ฆฌํ„ดํ•œ๋‹ค. + +1์€ 2๋ฒˆ ์ถœ๋ ฅ๋˜๊ณ , 0์€ 1๋ฒˆ ์ถœ๋ ฅ๋œ๋‹ค. N์ด ์ฃผ์–ด์กŒ์„ ๋•Œ,ย `fibonacci(N)`์„ ํ˜ธ์ถœํ–ˆ์„ ๋•Œ, 0๊ณผ 1์ด ๊ฐ๊ฐ ๋ช‡ ๋ฒˆ ์ถœ๋ ฅ๋˜๋Š”์ง€ ๊ตฌํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๋Š” ํ•œ ์ค„๋กœ ์ด๋ฃจ์–ด์ ธ ์žˆ๊ณ , N์ด ์ฃผ์–ด์ง„๋‹ค. N์€ 40๋ณด๋‹ค ์ž‘๊ฑฐ๋‚˜ ๊ฐ™์€ ์ž์—ฐ์ˆ˜ ๋˜๋Š” 0์ด๋‹ค. + +# ์ถœ๋ ฅ + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๋งˆ๋‹ค 0์ด ์ถœ๋ ฅ๋˜๋Š” ํšŸ์ˆ˜์™€ 1์ด ์ถœ๋ ฅ๋˜๋Š” ํšŸ์ˆ˜๋ฅผ ๊ณต๋ฐฑ์œผ๋กœ ๊ตฌ๋ถ„ํ•ด์„œ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +๊ฐ ์ˆ˜๋ฅผ ํ†ตํ•ด ํ”ผ๋ณด๋‚˜์น˜ ํ•จ์ˆ˜๋ฅผ ํ˜ธ์ถœํ–ˆ์„ ๋•Œ 0๊ณผ 1์ด ์ถœ๋ ฅ ๋˜๋Š” ํšŸ์ˆ˜๋ฅผ ๊ฐ ๋”•์…”๋„ˆ๋ฆฌ์— memoizationํ•˜์—ฌ 0๊ณผ 1์˜ ์ถœ๋ ฅ ํšŸ์ˆ˜๋ฅผ ๊ตฌํ•œ๋‹ค. ๋‹จ์ˆœํžˆ ํ”ผ๋ณด๋‚˜์น˜ ์ˆ˜๋ฅผ ๊ตฌํ•˜๋Š” ๊ฒƒ์ด ์•„๋‹ˆ๋ผ ๊ฐœ์ˆ˜๋ฅผ ๊ตฌํ•˜๋Š” ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์— (0์˜ ํšŸ์ˆ˜, 1์˜ ํšŸ์ˆ˜)๋ฅผ returnํ•˜๋„๋ก ํ•จ์ˆ˜๋ฅผ ์ž‘์„ฑํ•œ๋‹ค. + +## ์ฝ”๋“œ + +```python +n = int(input()) + +memo1 = {} +memo2 = {} + +def fib(n): + if n == 0: + return 1, 0 + elif n == 1: + return 0, 1 + + if n in memo1: + zero = memo1[n] + if n in memo2: + one = memo2[n] + else: + zero1, one1 = fib(n - 1) + zero2, one2 = fib(n - 2) + zero = zero1 + zero2 + one = one1 + one2 + memo1[n] = zero + memo2[n] = one + return zero, one + +for _ in range(n): + num = int(input()) + print(*fib(num), sep=' ') +``` \ No newline at end of file diff --git a/_posts/2023-02-04-boj_1005.markdown b/_posts/2023-02-04-boj_1005.markdown new file mode 100644 index 00000000000..451a73416b8 --- /dev/null +++ b/_posts/2023-02-04-boj_1005.markdown @@ -0,0 +1,293 @@ +--- +title: "[BOJ]ACM Craft - 1005 (G3)" +author: kwon +date: 2023-02-04T14:00:00 +0900 +categories: [boj, gold] +tags: [dynamic programing, graph theory, topological sort] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 1 ์ดˆ | 512 MB | + +# ๋ฌธ์ œ + +์„œ๊ธฐ 2012๋…„! ๋“œ๋””์–ด 2๋…„๊ฐ„ ์ˆ˜๋งŽ์€ ๊ตญ๋ฏผ๋“ค์„ ๊ธฐ๋‹ค๋ฆฌ๊ฒŒย ํ•œ ๊ฒŒ์ž„ ACM Craft (Association of Construction Manager Craft)๊ฐ€ ๋ฐœ๋งค๋˜์—ˆ๋‹ค. + +์ด ๊ฒŒ์ž„์€ ์ง€๊ธˆ๊นŒ์ง€ ๋‚˜์˜จ ๊ฒŒ์ž„๋“ค๊ณผ๋Š” ๋‹ค๋ฅด๊ฒŒ ACMํฌ๋ž˜ํ”„ํŠธ๋Š” ๋‹ค์ด๋‚˜๋ฏนํ•œ ๊ฒŒ์ž„ ์ง„ํ–‰์„ ์œ„ํ•ด ๊ฑด๋ฌผ์„ ์ง“๋Š” ์ˆœ์„œ๊ฐ€ ์ •ํ•ด์ ธ ์žˆ์ง€ ์•Š๋‹ค. ์ฆ‰, ์ฒซ ๋ฒˆ์งธ ๊ฒŒ์ž„๊ณผ ๋‘ ๋ฒˆ์งธ ๊ฒŒ์ž„์ด ๊ฑด๋ฌผ์„ ์ง“๋Š” ์ˆœ์„œ๊ฐ€ ๋‹ค๋ฅผ ์ˆ˜๋„ ์žˆ๋‹ค. ๋งค ๊ฒŒ์ž„์‹œ์ž‘ ์‹œ ๊ฑด๋ฌผ์„ ์ง“๋Š” ์ˆœ์„œ๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๋˜ํ•œ ๋ชจ๋“  ๊ฑด๋ฌผ์€ ๊ฐ๊ฐ ๊ฑด์„ค์„ ์‹œ์ž‘ํ•˜์—ฌ ์™„์„ฑ์ด ๋  ๋•Œ๊นŒ์ง€ Delay๊ฐ€ ์กด์žฌํ•œ๋‹ค. + +![](https://www.acmicpc.net/upload/201003/star.JPG) + +์œ„์˜ ์˜ˆ์‹œ๋ฅผ ๋ณด์ž. + +์ด๋ฒˆ ๊ฒŒ์ž„์—์„œ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๊ฑด์„ค ์ˆœ์„œ ๊ทœ์น™์ด ์ฃผ์–ด์กŒ๋‹ค.ย 1๋ฒˆ ๊ฑด๋ฌผ์˜ ๊ฑด์„ค์ด ์™„๋ฃŒ๋œ๋‹ค๋ฉด 2๋ฒˆ๊ณผ 3๋ฒˆ์˜ ๊ฑด์„ค์„ ์‹œ์ž‘ํ• ์ˆ˜ ์žˆ๋‹ค. (๋™์‹œ์— ์ง„ํ–‰์ด ๊ฐ€๋Šฅํ•˜๋‹ค)ย ๊ทธ๋ฆฌ๊ณ  4๋ฒˆ ๊ฑด๋ฌผ์„ ์ง“๊ธฐ ์œ„ํ•ด์„œ๋Š” 2๋ฒˆ๊ณผ 3๋ฒˆ ๊ฑด๋ฌผ์ด ๋ชจ๋‘ ๊ฑด์„ค ์™„๋ฃŒ๋˜์–ด์•ผ์ง€๋งŒ 4๋ฒˆ๊ฑด๋ฌผ์˜ ๊ฑด์„ค์„ ์‹œ์ž‘ํ• ์ˆ˜ ์žˆ๋‹ค. + +๋”ฐ๋ผ์„œ 4๋ฒˆ๊ฑด๋ฌผ์˜ ๊ฑด์„ค์„ ์™„๋ฃŒํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” ์šฐ์„  ์ฒ˜์Œ 1๋ฒˆ ๊ฑด๋ฌผ์„ ๊ฑด์„คํ•˜๋Š”๋ฐ 10์ดˆ๊ฐ€ ์†Œ์š”๋œ๋‹ค. ๊ทธ๋ฆฌ๊ณ  2๋ฒˆ ๊ฑด๋ฌผ๊ณผ 3๋ฒˆ ๊ฑด๋ฌผ์„ ๋™์‹œ์— ๊ฑด์„คํ•˜๊ธฐ ์‹œ์ž‘ํ•˜๋ฉด 2๋ฒˆ์€ 1์ดˆ๋’ค์— ๊ฑด์„ค์ด ์™„๋ฃŒ๋˜์ง€๋งŒ ์•„์ง 3๋ฒˆ ๊ฑด๋ฌผ์ด ์™„๋ฃŒ๋˜์ง€ ์•Š์•˜์œผ๋ฏ€๋กœ 4๋ฒˆ ๊ฑด๋ฌผ์„ ๊ฑด์„คํ•  ์ˆ˜ ์—†๋‹ค. 3๋ฒˆ ๊ฑด๋ฌผ์ด ์™„์„ฑ๋˜๊ณ  ๋‚˜๋ฉด ๊ทธ๋•Œ 4๋ฒˆ ๊ฑด๋ฌผ์„ ์ง€์„์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ 4๋ฒˆ ๊ฑด๋ฌผ์ด ์™„์„ฑ๋˜๊ธฐ๊นŒ์ง€๋Š” ์ด 120์ดˆ๊ฐ€ ์†Œ์š”๋œ๋‹ค. 
+ +ํ”„๋กœ๊ฒŒ์ด๋จธ ์ตœ๋ฐฑ์ค€์€ ์• ์ธ๊ณผ์˜ ๋ฐ์ดํŠธ ๋น„์šฉ์„ ๋งˆ๋ จํ•˜๊ธฐ ์œ„ํ•ด ์„œ๊ฐ•๋Œ€ํ•™๊ต๋ฐฐ ACMํฌ๋ž˜ํ”„ํŠธ ๋Œ€ํšŒ์— ์ฐธ๊ฐ€ํ–ˆ๋‹ค! ์ตœ๋ฐฑ์ค€์€ ํ™”๋ คํ•œ ์ปจํŠธ๋กค ์‹ค๋ ฅ์„ ๊ฐ€์ง€๊ณ  ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ๋ชจ๋“  ๊ฒฝ๊ธฐ์—์„œ ํŠน์ • ๊ฑด๋ฌผ๋งŒ ์ง“๋Š”๋‹ค๋ฉด ๋ฌด์กฐ๊ฑด ๊ฒŒ์ž„์—์„œ ์ด๊ธธ ์ˆ˜ ์žˆ๋‹ค.ย ๊ทธ๋Ÿฌ๋‚˜ ๋งค ๊ฒŒ์ž„๋งˆ๋‹ค ํŠน์ •๊ฑด๋ฌผ์„ ์ง“๊ธฐ ์œ„ํ•œ ์ˆœ์„œ๊ฐ€ ๋‹ฌ๋ผ์ง€๋ฏ€๋กœ ์ตœ๋ฐฑ์ค€์€ ์ขŒ์ ˆํ•˜๊ณ  ์žˆ์—ˆ๋‹ค.ย ๋ฐฑ์ค€์ด๋ฅผ ์œ„ํ•ด ํŠน์ •๊ฑด๋ฌผ์„ ๊ฐ€์žฅ ๋นจ๋ฆฌ ์ง€์„ ๋•Œ๊นŒ์ง€ ๊ฑธ๋ฆฌ๋Š” ์ตœ์†Œ์‹œ๊ฐ„์„ ์•Œ์•„๋‚ด๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•ด์ฃผ์ž. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์—๋Š” ํ…Œ์ŠคํŠธ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ฃผ์–ด์ง„๋‹ค.ย ์ฒซ์งธ ์ค„์— ๊ฑด๋ฌผ์˜ ๊ฐœ์ˆ˜ N๊ณผ ๊ฑด๋ฌผ๊ฐ„์˜ ๊ฑด์„ค์ˆœ์„œ ๊ทœ์น™์˜ ์ด ๊ฐœ์ˆ˜ K์ด ์ฃผ์–ด์ง„๋‹ค.ย (๊ฑด๋ฌผ์˜ ๋ฒˆํ˜ธ๋Š” 1๋ฒˆ๋ถ€ํ„ฐ N๋ฒˆ๊นŒ์ง€ ์กด์žฌํ•œ๋‹ค) + +๋‘˜์งธ ์ค„์—๋Š” ๊ฐ ๊ฑด๋ฌผ๋‹น ๊ฑด์„ค์— ๊ฑธ๋ฆฌ๋Š” ์‹œ๊ฐ„ D1, D2, ..., DN์ด ๊ณต๋ฐฑ์„ ์‚ฌ์ด๋กœ ์ฃผ์–ด์ง„๋‹ค.ย ์…‹์งธ ์ค„๋ถ€ํ„ฐ K+2์ค„๊นŒ์ง€ ๊ฑด์„ค์ˆœ์„œ X Y๊ฐ€ ์ฃผ์–ด์ง„๋‹ค.ย (์ด๋Š” ๊ฑด๋ฌผ X๋ฅผ ์ง€์€ ๋‹ค์Œ์— ๊ฑด๋ฌผ Y๋ฅผ ์ง“๋Š” ๊ฒƒ์ด ๊ฐ€๋Šฅํ•˜๋‹ค๋Š” ์˜๋ฏธ์ด๋‹ค) + +๋งˆ์ง€๋ง‰ ์ค„์—๋Š” ๋ฐฑ์ค€์ด๊ฐ€ ์Šน๋ฆฌํ•˜๊ธฐ ์œ„ํ•ด ๊ฑด์„คํ•ด์•ผย ํ•  ๊ฑด๋ฌผ์˜ ๋ฒˆํ˜ธ W๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +๊ฑด๋ฌผ W๋ฅผ ๊ฑด์„ค์™„๋ฃŒ ํ•˜๋Š”๋ฐ ๋“œ๋Š” ์ตœ์†Œ ์‹œ๊ฐ„์„ ์ถœ๋ ฅํ•œ๋‹ค.ย ํŽธ์˜์ƒ ๊ฑด๋ฌผ์„ ์ง“๋Š” ๋ช…๋ น์„ ๋‚ด๋ฆฌ๋Š” ๋ฐ๋Š” ์‹œ๊ฐ„์ด ์†Œ์š”๋˜์ง€ ์•Š๋Š”๋‹ค๊ณ  ๊ฐ€์ •ํ•œ๋‹ค. + +๊ฑด์„ค์ˆœ์„œ๋Š” ๋ชจ๋“  ๊ฑด๋ฌผ์ด ๊ฑด์„ค ๊ฐ€๋Šฅํ•˜๋„๋ก ์ฃผ์–ด์ง„๋‹ค. + +# ํ’€์ด + +## ์ฒซ ์ ‘๊ทผ + +์ฒ˜์Œ ๋ฌธ์ œ๋ฅผ ๋ดค์„ ๋•Œ๋Š” ๊ทธ๋ž˜ํ”„ ๊ตฌ์กฐ ๋ฌธ์ œ์ด๊ณ , BFS๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์ ‘๊ทผํ•ด์•ผ๊ฒ ๋‹ค๊ณ  ์ƒ๊ฐํ–ˆ๋‹ค. DFS๋กœ ์ ‘๊ทผํ•˜๋ฉด ๋ชฉํ‘œํ•œ ๊ฑด๋ฌผ์ด ์—†๋Š” ๊ธธ์ด ๋„ˆ๋ฌด ๊นŠ๋‹ค๋ฉด ์‹œ๊ฐ„ ๋ณต์žก๋„๊ฐ€ ๋„ˆ๋ฌด ์ฆ๊ฐ€ํ•  ๊ฒƒ์ด๋ผ๊ณ  ์ƒ๊ฐํ–ˆ๋‹ค. ๋˜ํ•œ ํ•œ ๊ฑด๋ฌผ๋กœ ๊ฐ€๋Š” ๊ธธ์ด ์—ฌ๋Ÿฌ ๊ฐœ์ด๋ฉฐ, ๋ชฉํ‘œ ๊ฑด๋ฌผ์— ๋„๋‹ฌํ•˜๋Š” ๊ธธ์„ ๋ชจ๋‘ ํƒ์ƒ‰ํ–ˆ์œผ๋ฉด ๋” ๊นŠ๊ฒŒ ํƒ์ƒ‰ํ•  ํ•„์š”๊ฐ€ ์—†๊ธฐ ๋•Œ๋ฌธ์— DFS๋Š” ๋งค์šฐ ๋ถˆ๋ฆฌํ•  ๊ฒƒ์ด๋ผ ์ƒ๊ฐํ–ˆ๋‹ค. + +๋ฌธ์ œ์—์„œ๋Š” ์–ด๋–ค ๊ฑด๋ฌผ๋กœ ๊ฑด์ถ•์„ ์‹œ์ž‘ํ•ด์•ผ ํ•˜๋Š”์ง€ ์ •ํ™•ํžˆ ๋ช…์‹œํ•˜์ง€ ์•Š๊ณ  ์žˆ๋‹ค. ๊ทธ๋ž˜์„œ ์ฝ”๋“œ ๋‚ด์—์„œ ์ด๋ฅผ ํ•ด๊ฒฐํ•ด ์ฃผ์–ด์•ผ ํ•œ๋‹ค. ์–ด๋–ค ๊ฑด๋ฌผ์ด ์ฒ˜์Œ์œผ๋กœ ์ง€์–ด์งˆ ์ˆ˜ ์žˆ๋‹ค๋Š” ๊ฒƒ์€ ๊ทธ ๊ฑด๋ฌผ์„ ์ง“๊ธฐ ์ „์— ๋‹ค๋ฅธ ๊ฑด๋ฌผ๋“ค์„ ์ง€์„ ํ•„์š”๊ฐ€ ์—†๋‹ค๋Š” ๋œป์ด๋ฉฐ ์ด๋Š” ๊ฑด์„ค ์ˆœ์„œ๋ฅผ ์ž…๋ ฅ ๋ฐ›์„ ๋•Œ Y๋กœ ์ž…๋ ฅ๋˜์ง€ ์•Š์•˜๋˜ ๊ฐ’์ด๋‹ค. Y๋กœ ํ•œ ๋ฒˆ์ด๋ผ๋„ ์ž…๋ ฅ๋œ ๊ฐ’์€ ์ ์–ด๋„ ํ•˜๋‚˜ ์ด์ƒ์˜ ๊ฑด๋ฌผ์„ ์ง€์–ด์•ผ ๊ทธ ๊ฑด๋ฌผ์„ ์ง€์„ ์ˆ˜ ์žˆ๋‹ค๋Š” ๊ฒƒ์ด๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. ์ฒ˜์Œ ์‹œ์ž‘ํ•˜๋Š” ๊ฑด๋ฌผ๋“ค์€ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ฐพ์•„๋‚ผ ์ˆ˜ ์žˆ๋‹ค. + +```python + ... + first_val = list(range(1, n + 1)) + for i in range(k): + x, y = map(int, input().split()) + xy_dict[x].append(y) + ... + try: first_val.remove(y) + except: pass + ... +``` + +๋ชจ๋“  ๊ฑด๋ฌผ ๋ฒˆํ˜ธ๊ฐ€ ๋“ค์–ด๊ฐ€ ์žˆ๋Š” ๋ฐฐ์—ด์—์„œ Y๋กœ ์ž…๋ ฅ๋œ ๊ฐ’๋“ค๋งŒ ์‚ญ์ œํ•œ๋‹ค. ์ดํ›„ ํƒ์ƒ‰ํ•˜๋ฉด์„œ `time`๋”•์…”๋„ˆ๋ฆฌ์— ๊ฑด๋ฌผ ๋ฒˆํ˜ธ : ์ง“๋Š”๋ฐ ๊ฑธ๋ฆฌ๋Š” ์‹œ๊ฐ„์„ key : vlaue๋กœ ์ €์žฅํ•ด์ค€๋‹ค. ์‹œ๊ฐ„์„ ์ €์žฅํ•  ๋•Œ๋Š” ์—ฌ๋Ÿฌ ๊ฑด๋ฌผ์„ ๋™์‹œ์— ์ง€์„ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ํƒ์ƒ‰ํ•˜๋ฉด์„œ ๊ณ„์‚ฐํ•œ ์‹œ๊ฐ„๋“ค ์ค‘์— ๊ฐ€์žฅ ํฐ ๊ฒƒ๋งŒ ์ €์žฅํ•˜๋ฉด ๋œ๋‹ค. ์ฝ”๋“œ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +```python + ... + for k in keys: + tmp = [] + for v in xy_dict[k]: + ... + if v not in times: + times[v] = times[k] + d_list[v - 1] + else: + times[v] = max(times[k] + d_list[v - 1], times[v]) + ... +``` + +ํƒ์ƒ‰ํ•˜๋ฉด์„œ ๋‚˜์˜จ ๊ฑด๋ฌผ๋“ค(`v`)์„ ๋‹ค์‹œ queue์— ๋„ฃ์–ด์ฃผ๋ฉด์„œ BFS๋ฅผ ์ง„ํ–‰ํ•œ๋‹ค. 
+ +๋‹ค๋งŒ, ๋ชฉํ‘œ ๊ฑด๋ฌผ์ด Y๋กœ ํ•œ ๋ฒˆ๋„ ๋“ฑ์žฅํ•˜์ง€ ์•Š์•˜๋‹ค๋ฉด ํ•„์š” ๊ฑด๋ฌผ์ด ์—†๋‹ค๋Š” ๊ฒƒ์ด๋ฏ€๋กœ ๋ฐ”๋กœ ๋ชฉํ‘œ ๊ฑด๋ฌผ์˜ ๊ฑด์ถ• ์‹œ๊ฐ„์„ ์ถœ๋ ฅํ•œ๋‹ค. + +```python + ... + if counts[target] != 0: + print(bfs(xy_dict, d_list, first_val, target, counts[target])) + else: + print(d_list[target - 1]) +``` + +ํ•˜์ง€๋งŒ ์ด๋ ‡๊ฒŒ ์ฒ˜๋ฆฌ๋ฅผ ํ•˜๊ณ  BFS ๊ธฐ๋ฐ˜์œผ๋กœ๋งŒ ํ’€์–ด๋‚˜๊ฐ€๋ฉด ๋ชจ๋“  ๊ฒฝ์šฐ๋“ค์„ ์ฒ˜๋ฆฌํ•˜๊ธฐ ์‰ฝ์ง€ ์•Š๋‹ค. ๋‹ค์Œ ์˜ˆ์ œ๋ฅผ ๋ณด์ž. + +``` +1 +5 10 +100000 99999 99997 99994 99990 +4 5 +3 5 +3 4 +2 5 +2 4 +2 3 +1 5 +1 4 +1 3 +1 2 +4 +``` + +์ž…๋ ฅ๋˜๋Š” ์ˆœ์„œ๋ฅผ ๋ณ€๊ฒฝํ•˜์ง€ ์•Š๊ณ  ๋‹จ์ˆœํ•˜๊ฒŒ ํƒ์ƒ‰์„ ์ง„ํ–‰ํ•˜๋ฉด 2, 3 ๋ฒˆ ๊ฑด๋ฌผ์˜ ํƒ์ƒ‰์ด ๋๋‚˜๊ธฐ ์ „์— 4๋ฒˆ์˜ ํƒ์ƒ‰์ด ์™„๋ฃŒ๋˜์–ด ๋ฒ„๋ ค ์˜ค๋‹ต์ด ์ถœ๋ ฅ ๋œ๋‹ค. + +### ์ฒซ ์ ‘๊ทผ ์ „์ฒด ์ฝ”๋“œ + +```python +import sys +from collections import defaultdict +input = sys.stdin.readline + +def bfs(xy_dict, d_list, first_val, target, target_num): + first_idx = 0 + q = [] + times = {} + times[first_val[0]] = d_list[first_val[0] - 1] + keys = [first_val[0]] + while True: + if q: + keys = q.pop(0) + for k in keys: + tmp = [] + for v in xy_dict[k]: + if target == v: target_num -= 1 + tmp.append(v) + if v not in times: + times[v] = times[k] + d_list[v - 1] + else: + times[v] = max(times[k] + d_list[v - 1], times[v]) + q.append(tmp) + if target_num == 0: + return times[target] + if not q: + first_idx += 1 + next = first_val[first_idx] + times[next] = d_list[next - 1] + keys = [next] + +t = int(input()) +for _ in range(t): + n, k = map(int, input().split()) + xy_dict = defaultdict(list) + d_list = list(map(int, input().split())) + + counts = {i:0 for i in range(1, n + 1)} + first_val = list(range(1, n + 1)) + for i in range(k): + x, y = map(int, input().split()) + xy_dict[x].append(y) + counts[y] += 1 + try: first_val.remove(y) + except: pass + target = int(input()) + for x in xy_dict: + xy_dict[x].sort(key=lambda x: counts[x]) + if counts[target] != 0: + print(bfs(xy_dict, d_list, first_val, target, counts[target])) + else: + print(d_list[target - 1]) +``` + +## ๋‹ค๋ฅธ ์ ‘๊ทผ + +๋‹ค๋ฅธ ๋ฐฉ์‹์˜ ์ ‘๊ทผ์ด ํ•„์š”ํ•˜๋‹ค ์ƒ๊ฐํ•˜์—ฌ ์ˆœ์„œ๋ฅผ ์ •ํ•˜๋Š” ์ฃผ์š”ํ•œ ์š”์ธ์ด ๋ฌด์—‡์ผ๊นŒ ์ƒ๊ฐํ•ด ๋ดค๋‹ค. ์œ„ ๋ฐฉ์‹์—์„œ ๋ฌธ์ œ๊ฐ€ ๋๋˜ ๊ฒƒ์€ ๋จผ์ € ์ง€์–ด์ ธ์•ผ ํ•  ๊ฑด๋ฌผ์˜ ํƒ์ƒ‰์ด ๋๋‚˜๊ธฐ ์ „์— ๋‹ค์Œ ๋‹จ๊ณ„์˜ ๊ฑด๋ฌผ์„ ํƒ์ƒ‰ํ•˜๊ธฐ ์‹œ์ž‘ํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. ์ด๋•Œ ๊ฑด๋ฌผ๋“ค์˜ ์ˆœ์„œ๋Š” ํ•ด๋‹น ๊ฑด๋ฌผ์˜ ์ง“๊ธฐ ์œ„ํ•ด ๋จผ์ € ์ง€์–ด์•ผ ํ•˜๋Š” ๊ฑด๋ฌผ๋“ค์˜ ๊ฐœ์ˆ˜๋กœ ์ •์˜ํ•  ์ˆ˜ ์žˆ๋‹ค. + +``` +1 +5 10 +100000 99999 99997 99994 99990 +4 5 +3 5 +3 4 +2 5 +2 4 +2 3 +1 5 +1 4 +1 3 +1 2 +4 +``` + +์œ„์—์„œ ๋ดค๋˜ ์˜ˆ์ œ๋ฅผ ๋‹ค์‹œ ๋ณด๋ฉด์„œ ์ƒ๊ฐํ•ด๋ณด์ž. + +๊ฐ ๊ฑด๋ฌผ๋“ค์˜ ์ง“๊ธฐ ์œ„ํ•ด ํ•„์š”ํ•œ ๊ฑด๋ฌผ๋“ค์˜ ๊ฐœ์ˆ˜๋ฅผ ๋‚˜ํƒ€๋‚ด๋ณด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +| ๊ฑด๋ฌผ ๋ฒˆํ˜ธ | 1 | 2 | 3 | 4 | 5 | +| --- | --- | --- | --- | --- | --- | +| ํ•„์š” ๊ฑด๋ฌผ | 0 | 1 | 2 | 3 | 4 | + +ํ•„์š” ๊ฑด๋ฌผ์ด ๋งŽ์„์ˆ˜๋ก ๋’ค์— ํƒ์ƒ‰ํ•˜๋ฉด ์œ„์—์„œ ๋งํ–ˆ๋˜ ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•  ์ˆ˜ ์žˆ๋‹ค. + +ํƒ์ƒ‰์„ ํ•  ๊ฑด๋ฌผ์€ ํ•ญ์ƒ ํ•„์š” ๊ฑด๋ฌผ์ด 0์ธ ๊ฒƒ๋“ค๋กœ๋งŒ ๊ตฌ์„ฑํ•  ๊ฒƒ์ด๋‹ค. ์ด๋ฅผ ์–ด๋–ป๊ฒŒ ํ•˜๋Š”์ง€ ํƒ์ƒ‰ ๊ณผ์ •์„ ์„ธ์„ธํ•˜๊ฒŒ ํ™•์ธํ•ด ๋ณด์ž. ํ˜„์žฌ ๊ฑด๋ฌผ๋“ค์˜ ๊ด€๊ณ„๋ฅผ ๊ทธ๋ž˜ํ”„๋กœ ๋‚˜ํƒ€๋‚ด๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +![](/posting_imgs/boj-1005.png) + +| ๊ฑด๋ฌผ ๋ฒˆํ˜ธ | 1 | 2 | 3 | 4 | 5 | +| --- | --- | --- | --- | --- | --- | +| ํ•„์š” ๊ฑด๋ฌผ | 0 | 1 | 2 | 3 | 4 | + +queue: 1 + +ํƒ์ƒ‰ ์™„๋ฃŒ: + +์ฒ˜์Œ์—๋Š” ํ•„์š” ๊ฑด๋ฌผ์ด 0์ธ ๊ฒฝ์šฐ๊ฐ€ 1 ๋ฐ–์— ์—†์œผ๋ฏ€๋กœ queue์— 1์„ ๋„ฃ์–ด์ค€๋‹ค. 
+ +| ๊ฑด๋ฌผ ๋ฒˆํ˜ธ | 1 | 2 | 3 | 4 | 5 | +| --- | --- | --- | --- | --- | --- | +| ํ•„์š” ๊ฑด๋ฌผ | 0 | 0 | 1 | 2 | 3 | + +queue: 2 + +ํƒ์ƒ‰ ์™„๋ฃŒ: 1 + +1์€ 2, 3, 4, 5์™€ ์—ฐ๊ฒฐ๋˜์–ด ์žˆ์œผ๋ฏ€๋กœ 1์˜ ํƒ์ƒ‰์ด ๋๋‚˜๋ฉด(๋‹ค ์ง€์–ด์ง€๋ฉด) 2, 3, 4, 5์˜ ํ•„์š” ๊ฑด๋ฌผ์„ ํ•˜๋‚˜์”ฉ ์ค„์ผ ์ˆ˜ ์žˆ๋‹ค. ํ•„์š” ๊ฑด๋ฌผ์„ ์ค„์ธ ํ›„์— ํƒ์ƒ‰์ด ๊ฐ€๋Šฅํ•œ ๊ฑด๋ฌผ์€ 2๋ฒˆ์ด๋‹ค(ํ•„์š” ๊ฑด๋ฌผ์ด 0์ด ๋˜์—ˆ๊ธฐ ๋•Œ๋ฌธ). ์ด๋Ÿฐ ๋ฐฉ์‹์œผ๋กœ ๊ณ„์† ํƒ์ƒ‰ํ•œ๋‹ค. + +| ๊ฑด๋ฌผ ๋ฒˆํ˜ธ | 1 | 2 | 3 | 4 | 5 | +| --- | --- | --- | --- | --- | --- | +| ํ•„์š” ๊ฑด๋ฌผ | 0 | 0 | 0 | 1 | 2 | + +queue: 3 + +ํƒ์ƒ‰ ์™„๋ฃŒ: 1, 2 + +| ๊ฑด๋ฌผ ๋ฒˆํ˜ธ | 1 | 2 | 3 | 4 | 5 | +| --- | --- | --- | --- | --- | --- | +| ํ•„์š” ๊ฑด๋ฌผ | 0 | 0 | 0 | 0 | 1 | + +queue: 4 + +ํƒ์ƒ‰ ์™„๋ฃŒ: 1, 2, 3 + +์ด๋ ‡๊ฒŒ ๋ชฉํ‘œ ๊ฑด๋ฌผ์ด 4๋ฒˆ์ด queue์— ๋“ค์–ด์™€ ํƒ์ƒ‰์„ ๋งˆ์น˜๋ฉด 4๋ฒˆ ๊ฑด๋ฌผ์„ ์ง€์—ˆ๋‹ค๋Š” ๊ฒƒ์ด๋ฏ€๋กœ ํƒ์ƒ‰์„ ๋ฉˆ์ถ”๊ณ  ์ €์žฅํ•ด๋‘์—ˆ๋˜ ์‹œ๊ฐ„์„ ์ถœ๋ ฅํ•˜๋ฉด ๋œ๋‹ค. + +์ด ๋ฌธ์ œ์˜ ์ž…๋ ฅ์œผ๋กœ ์ฃผ์–ด์ง€๋Š” ๊ฑด๋ฌผ๋“ค๋กœ ๋งŒ๋“ค์–ด์ง„ ๊ทธ๋ž˜ํ”„๋Š” ํ•ญ์ƒ ๋ฐฉํ–ฅ์„ฑ์„ ๊ฐ€์ง€๋ฉฐ, ํ•ญ์ƒ ๋ชจ๋“  ๊ฑด๋ฌผ์ด ๊ฑด์ถ• ๊ฐ€๋Šฅํ•˜๋„๋ก ์ฃผ์–ด์ง„๋‹ค๊ณ  ํ–ˆ๊ธฐ ๋•Œ๋ฌธ์— acyclic์ด๋‹ค. ์ฆ‰ ์ด ๋ฌธ์ œ์˜ ๊ทธ๋ž˜ํ”„๋Š” DAG(Directed Acyclic Graph)์ด๋‹ค. DAG์—์„œ ์–ด๋–ค ๋…ธ๋“œ๋กœ ๋“ค์–ด์˜ค๋Š” ๊ฐ„์„ ์˜ ๊ฐœ์ˆ˜๋ฅผ indegree๋ผ๊ณ  ํ•˜๋Š”๋ฐ ์ด indegree์˜ ๊ฐœ์ˆ˜์— ๋”ฐ๋ผ ์ •๋ ฌํ•˜๋Š” ๊ฒƒ์„ ์œ„์ƒ ์ •๋ ฌ(topologicla sort)์ด๋ผ๊ณ  ํ•œ๋‹ค. + +๋”ฐ๋ผ์„œ ์šฐ๋ฆฌ๊ฐ€ ์œ„์—์„œ ํ•„์š” ๊ฑด๋ฌผ(indegree)์— ๋”ฐ๋ผ ์ •๋ ฌํ•˜์—ฌ ์‹œ๊ฐ„์„ ๊ณ„์‚ฐํ•œ ๊ฒƒ์€ ์œ„์ƒ ์ •๋ ฌ์„ ์ด์šฉํ•œ ์•Œ๊ณ ๋ฆฌ์ฆ˜์ธ ๊ฒƒ์ด๋‹ค. + +### ์ตœ์ข… ์ „์ฒด ์ฝ”๋“œ + +```python +import sys +from collections import defaultdict +input = sys.stdin.readline + +def topological_sort(xy_dict, d_list, first_val, counts, target): + times = {val:d_list[val - 1] for val in first_val} + q = first_val + + while q: + cur = q.pop(0) + if cur == target: return times[target] + for next in xy_dict[cur]: + if next not in times: + times[next] = times[cur] + d_list[next - 1] + else: + times[next] = max(times[cur] + d_list[next - 1], times[next]) + counts[next] -= 1 + tmp = list(counts.keys())[:] + for i in tmp: + if counts[i] == 0: + q.append(i) + del counts[i] + +t = int(input()) +for _ in range(t): + n, k = map(int, input().split()) + xy_dict = defaultdict(list) + d_list = list(map(int, input().split())) + + counts = defaultdict(int) + first_val = list(range(1, n + 1)) + for i in range(k): + x, y = map(int, input().split()) + xy_dict[x].append(y) + counts[y] += 1 + try: first_val.remove(y) + except: pass + target = int(input()) + + if target in counts: + print(topological_sort(xy_dict, d_list, first_val, counts, target)) + else: + print(d_list[target - 1]) +``` \ No newline at end of file diff --git a/_posts/2023-02-04-boj_1074.markdown b/_posts/2023-02-04-boj_1074.markdown new file mode 100644 index 00000000000..fe99691b7e3 --- /dev/null +++ b/_posts/2023-02-04-boj_1074.markdown @@ -0,0 +1,111 @@ +--- +title: "[BOJ]Z - 1074 (S1)" +author: kwon +date: 2022-03-10T14:00:00 +0900 +categories: [boj, silver] +tags: [divide and conquer, recursion] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 0.5 ์ดˆ | 512 MB | + +tag: divide and conquer, recursion + +# ๋ฌธ์ œ + +ํ•œ์ˆ˜๋Š” ํฌ๊ธฐ๊ฐ€ $2^Nร—2^N$์ธ 2์ฐจ์› ๋ฐฐ์—ด์„ Z๋ชจ์–‘์œผ๋กœ ํƒ์ƒ‰ํ•˜๋ ค๊ณ  ํ•œ๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด, 2ร—2๋ฐฐ์—ด์„ ์™ผ์ชฝ ์œ„์นธ, ์˜ค๋ฅธ์ชฝ ์œ„์นธ, ์™ผ์ชฝ ์•„๋ž˜์นธ, ์˜ค๋ฅธ์ชฝ ์•„๋ž˜์นธ ์ˆœ์„œ๋Œ€๋กœ ๋ฐฉ๋ฌธํ•˜๋ฉด Z๋ชจ์–‘์ด๋‹ค. 
+ +![](https://u.acmicpc.net/21c73b56-5a91-43aa-b71f-9b74925c0adc/Screen%20Shot%202020-12-02%20at%208.09.46%20AM.png) + +N > 1์ธ ๊ฒฝ์šฐ, ๋ฐฐ์—ด์„ ํฌ๊ธฐ๊ฐ€ $2^{N-1}ร—2^{N-1}$๋กœ 4๋“ฑ๋ถ„ ํ•œ ํ›„์— ์žฌ๊ท€์ ์œผ๋กœ ์ˆœ์„œ๋Œ€๋กœ ๋ฐฉ๋ฌธํ•œ๋‹ค. + +๋‹ค์Œ ์˜ˆ๋Š” 22ย ร— 2ย ํฌ๊ธฐ์˜ ๋ฐฐ์—ด์„ ๋ฐฉ๋ฌธํ•œ ์ˆœ์„œ์ด๋‹ค. + +![](https://u.acmicpc.net/adc7cfae-e84d-4d5c-af8e-ee011f8fff8f/Screen%20Shot%202020-12-02%20at%208.11.17%20AM.png) + +N์ด ์ฃผ์–ด์กŒ์„ ๋•Œ, rํ–‰ c์—ด์„ ๋ช‡ ๋ฒˆ์งธ๋กœ ๋ฐฉ๋ฌธํ•˜๋Š”์ง€ ์ถœ๋ ฅํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. + +๋‹ค์Œ์€ N=3์ผ ๋•Œ์˜ ์˜ˆ์ด๋‹ค. + +![](https://u.acmicpc.net/d3e84bb7-9424-4764-ad3a-811e7fcbd53f/Screen%20Shot%202020-12-30%20at%2010.50.47%20PM.png) + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์ •์ˆ˜ N, r, c๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +rํ–‰ c์—ด์„ ๋ช‡ ๋ฒˆ์งธ๋กœ ๋ฐฉ๋ฌธํ–ˆ๋Š”์ง€ ์ถœ๋ ฅํ•œ๋‹ค. + +# ์ œํ•œ + +- $1โ‰คNโ‰ค15$ +- $0โ‰คr,c<2^N$ + +# ํ’€์ด + +์ˆ˜์˜ ํฌ๊ธฐ ์ œํ•œ์ด $2^{15}$์ธ ๋งŒํผ ๋ชจ๋‘ ์žฌ๊ท€๋กœ ํƒ์ƒ‰ํ•˜๋Š” ๊ฒƒ์€ ์ œํ•œ ์‹œ๊ฐ„์„ ๋งž์ถ”์ง€ ๋ชปํ•œ๋‹ค. ๋ณดํ†ต 1์ดˆ์— ์•ฝ $10^8$๋ฒˆ์˜ ์—ฐ์‚ฐ์„ ํ•œ๋‹ค๊ณ  ์ƒ๊ฐํ•˜๋ฏ€๋กœ O(N)์œผ๋กœ๋„ ํ•ด๊ฒฐํ•  ์ˆ˜ ์—†์–ด์ง„๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ์ฃผ์–ด์ง„ x, y๊ฐ€ ์žˆ๋Š” ์˜์—ญ์œผ๋กœ ๋น ๋ฅด๊ฒŒ ๋‹ค๊ฐ€๊ฐˆ ์ˆ˜ ์žˆ๋Š” ๋‹ค๋ฅธ ๋ฐฉ๋ฒ•์„ ์ฐพ์•„์•ผ ํ•œ๋‹ค. ํ•˜๋‚˜์˜ ํฐ Z๋Š” ์ด๋ฅผ ์‚ฌ๋“ฑ๋ถ„ํ•˜์—ฌ ์ž‘์€ Z๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. ์ด๋ฅผ ํ™œ์šฉํ•˜์—ฌ ์ฃผ์–ด์ง„ x, y๊ฐ€ ํ•ด๋‹น ์˜์—ญ์˜ ๊ฐ ๋ณ€์„ ์ง๊ฐ ์ด๋“ฑ๋ถ„ ํ•˜๋Š” ์ง์„ ์„ ๊ธฐ์ค€์œผ๋กœ ํฐ์ง€ ์ž‘์€์ง€ ํ™•์ธํ•˜๋ฉด 4๊ฐœ์˜ ์ž‘์€ ์˜์—ญ ์ค‘์— ์–ด๋”” ์†ํ•ด์žˆ๋Š”์ง€ ์•Œ ์ˆ˜ ์žˆ๋‹ค. 2์ฐจ์›์—์„œ ์ง„ํ–‰ํ•˜๋Š” ์ผ์ข…์˜ ์ด๋ถ„ ํƒ์ƒ‰์ด๋ผ๊ณ  ํ•  ์ˆ˜๋„ ์žˆ๊ฒ ๋‹ค. + +x, y๊ฐ€ ์–ด๋А ์˜์—ญ์— ์†ํ•˜๋Š”์ง€ ํ™•์ธํ•˜๋Š” ์ฝ”๋“œ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ž‘์„ฑํ•˜์˜€๋‹ค. ๋’ค์— ๋”ฐ๋ผ์˜ค๋Š” ๊ทธ๋ฆผ๊ณผ ํ•จ๊ป˜ ๋ณด์ž. + +```python +... + num = 2 ** (N - 1) + # ์ขŒ์ƒ๋‹จ + if x <= num and y <= num: + position(N-1, x, y, base) + + # ์šฐ์ƒ๋‹จ + elif x > num and y <= num: + position(N-1, x - num, y, 4 ** (N - 1) + base) + + # ์ขŒํ•˜๋‹จ + elif x <= num and y > num: + position(N-1, x, y - num, 2 * 4 ** (N - 1) + base) + + # ์šฐํ•˜๋‹จ + elif x > num and y > num: + position(N-1, x - num, y - num, 3 * 4 ** (N - 1) + base) +``` + +![](/posting_imgs/boj_1074.jpg) + +์œ„์™€ ๊ฐ™์ด ํ•œ ๋ณ€์ด $2^n$์ธ ์˜์—ญ์ด ์ฃผ์–ด์กŒ์„ ๋•Œ ๊ฐ ๋ณ€์„ ์ง๊ฐ ์ด๋“ฑ๋ถ„ ํ•˜๋Š” ์„ ์€ ๊ฐ€์žฅ ์ขŒ์ƒ๋‹จ์˜ ๊ผญ์ง“์ ์—์„œ $2^n/2(=2^{n-1})$ ๋งŒํผ ๋–จ์–ด์ ธ ์žˆ๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ x, y๊ฐ€ $2^{n-1}$๋ณด๋‹ค ํฐ์ง€ ์ž‘์€์ง€ ํ™•์ธํ•˜๋ฉด ๋„ค ๊ฐœ์˜ ์˜์—ญ ์ค‘ ์–ด๋””์— ์†ํ•ด์žˆ๋Š”์ง€ ์•Œ ์ˆ˜ ์žˆ๋‹ค. ๋‹จ ์—ฌ๊ธฐ์„œ x๋Š” ์ปค์งˆ์ˆ˜๋ก ์˜ค๋ฅธ์ชฝ, y๋Š” ์ปค์งˆ์ˆ˜๋ก ์•„๋ž˜๋กœ ์ง„ํ–‰ํ•œ๋‹ค๊ณ  ์ƒ๊ฐํ•ด์•ผ ํ•œ๋‹ค. + +base๋Š” ๋ง ๊ทธ๋Œ€๋กœ ์ง€๊ธˆ ํ™•์ธํ•˜๋ ค ํ•˜๋Š” ์˜์—ญ์˜ ๊ธฐ์ค€ ์นธ์„ ๋œปํ•˜๋Š” ๊ฒƒ์œผ๋กœ ๊ฐ€์žฅ ์ขŒ์ƒ๋‹จ์— ์œ„์น˜ํ•˜๋Š” ์นธ์„ ๋œปํ•œ๋‹ค. ๊ทธ๋ฆฌ๊ณ  ์ด base๋Š” ์›ํ•˜๋Š” ์ ์ด ์–ด๋А ์˜์—ญ์— ์†ํ•˜๋Š” ์ง€์— ๋”ฐ๋ผ ๊ณ„์† ๋ฐ”๋€Œ์–ด์•ผ ํ•œ๋‹ค. ์ฆ‰, ๊ฐ ์˜์—ญ์— ๋”ฐ๋ผ ๋…ธ๋ž€์ƒ‰์œผ๋กœ ์น ํ•œ ์นธ์ด ๋‹ค์Œ ์˜์—ญ์˜ base๊ฐ€ ๋œ๋‹ค. ๊ทธ๋ฆฌ๊ณ  ์ด base๋Š” Z ์ˆœ์„œ์— ๋”ฐ๋ผ $base+0,base+(4^{n-1}),base+(2\times4^{n-1}),base+(3\times4^{n-1})$๋กœ ๊ณ„์‚ฐํ•  ์ˆ˜ ์žˆ๋‹ค. ์ด๋Š” ์ž‘์€ ์˜์—ญ์— ์†ํ•œ ์นธ์˜ ๊ฐœ์ˆ˜๊ฐ€ $2^{n-1}\times2^{n-1}(=4^{n-1})$์ด๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. 
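+
+위 분할 방식이 실제 Z 순서와 일치하는지는 작은 N에 대해 완전탐색으로 만든 방문 순서와 비교해 검증해 볼 수 있다. 아래는 그런 검증용 스케치로, 함수 이름(`z_index`, `brute_z`)은 설명을 위해 임의로 붙인 것이고 문제의 입력처럼 0부터 시작하는 r, c를 가정한다.
+
+```python
+def z_index(n, r, c):
+    """r행 c열(0부터 시작)을 몇 번째로 방문하는지 분할 방식으로 계산"""
+    if n == 0:
+        return 0
+    half = 2 ** (n - 1)
+    quad = (r >= half) * 2 + (c >= half)  # 0: 좌상, 1: 우상, 2: 좌하, 3: 우하
+    return quad * half * half + z_index(n - 1, r % half, c % half)
+
+def brute_z(n):
+    """Z 순서대로 (r, c)를 직접 나열해 보는 완전탐색"""
+    if n == 0:
+        return [(0, 0)]
+    half = 2 ** (n - 1)
+    order = []
+    for dr, dc in [(0, 0), (0, half), (half, 0), (half, half)]:
+        order += [(r + dr, c + dc) for r, c in brute_z(n - 1)]
+    return order
+
+for n in range(1, 4):
+    assert all(z_index(n, r, c) == i for i, (r, c) in enumerate(brute_z(n)))
+```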
+ +## ์ฝ”๋“œ + +```python +import sys +N, y, x = map(int, sys.stdin.readline().split()) + +def position(N, x, y, base=0): + if N == 0 or (x <= 1 and y <= 1): + print(base) + return base + + num = 2 ** (N - 1) + if x <= num and y <= num: + position(N-1, x, y, base) + + elif x > num and y <= num: + position(N-1, x - num, y, 4 ** (N - 1) + base) + + elif x <= num and y > num: + position(N-1, x, y - num, 2 * 4 ** (N - 1) + base) + + elif x > num and y > num: + position(N-1, x - num, y - num, 3 * 4 ** (N - 1) + base) + +position(N, x + 1, y + 1) +``` + +์œ„์—์„œ ์„ค๋ช…ํ•œ ์•Œ๊ณ ๋ฆฌ์ฆ˜์„ ๋”ฐ๋ผ ๊ณ„์† ์ง„ํ–‰ํ•œ๋‹ค. ๋‹ค๋งŒ, ์„ค๋ช…ํ•  ๋•Œ๋Š” x, y๊ฐ’์ด ๋ณ€ํ•˜์ง€ ์•Š๋Š” ๊ฒƒ์œผ๋กœ ์ƒ๊ฐํ•˜๊ณ  ์ง„ํ–‰ํ–ˆ์ง€๋งŒ ํŽธ์˜๋ฅผ ์œ„ํ•ด x, y ์ค‘ ๋ณ€ํ•˜๋Š” ์ขŒํ‘œ์— $2^{n-1}$๋ฅผ ๋นผ๋ฉฐ ์ง„ํ–‰ํ•˜์˜€๋‹ค. ํ•„์š” ์—†๋Š” ์˜์—ญ์„ ์ž˜๋ผ๋‚ด๋ฉด์„œ ์ง„ํ–‰ํ•œ๋‹ค๊ณ  ์ƒ๊ฐํ•˜๋ฉด ๋œ๋‹ค. x, y์˜ ๊ฐ’์ด ์‹ค์ œ์™€ ๋‹ฌ๋ผ์ ธ๋„ base์˜ ๊ฐ’๋งŒ ์•Œ๋งž๊ฒŒ ๊ณ„์‚ฐํ•˜๋ฉด ์ •๋‹ต์„ ์ฐพ์•„๋‚ผ ์ˆ˜ ์žˆ๋‹ค. + +N์ด 0์ด ๋˜๊ฑฐ๋‚˜ x, y๊ฐ€ ๋ชจ๋‘ 1 ์ดํ•˜์ด๋ฉด ์ฐพ์œผ๋ ค๋Š” ์นธ์— ๋„๋‹ฌํ–ˆ๋‹ค๋Š” ๋œป์ด๋ฏ€๋กœ ๋‹ต์„ ์ถœ๋ ฅํ•˜๊ณ  ์ค‘์ง€ํ•œ๋‹ค. \ No newline at end of file diff --git a/_posts/2023-12-05-boj_1157.markdown b/_posts/2023-12-05-boj_1157.markdown new file mode 100644 index 00000000000..e016d05641c --- /dev/null +++ b/_posts/2023-12-05-boj_1157.markdown @@ -0,0 +1,58 @@ +--- +title: "[BOJ]๋‹จ์–ด ๊ณต๋ถ€ - 1157 (B1)" +author: kwon +date: 2023-12-05T14:00:00 +0900 +categories: [boj, bronze] +tags: [implementation, string] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 2 ์ดˆ | 128 MB | + +# ๋ฌธ์ œ + +์•ŒํŒŒ๋ฒณ ๋Œ€์†Œ๋ฌธ์ž๋กœ ๋œ ๋‹จ์–ด๊ฐ€ ์ฃผ์–ด์ง€๋ฉด, ์ด ๋‹จ์–ด์—์„œ ๊ฐ€์žฅ ๋งŽ์ด ์‚ฌ์šฉ๋œ ์•ŒํŒŒ๋ฒณ์ด ๋ฌด์—‡์ธ์ง€ ์•Œ์•„๋‚ด๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜์‹œ์˜ค. ๋‹จ, ๋Œ€๋ฌธ์ž์™€ ์†Œ๋ฌธ์ž๋ฅผ ๊ตฌ๋ถ„ํ•˜์ง€ ์•Š๋Š”๋‹ค. + +# ์ž…๋ ฅ + +์ฒซ์งธ ์ค„์— ์•ŒํŒŒ๋ฒณ ๋Œ€์†Œ๋ฌธ์ž๋กœ ์ด๋ฃจ์–ด์ง„ ๋‹จ์–ด๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ์ฃผ์–ด์ง€๋Š” ๋‹จ์–ด์˜ ๊ธธ์ด๋Š” 1,000,000์„ ๋„˜์ง€ ์•Š๋Š”๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ์งธ ์ค„์— ์ด ๋‹จ์–ด์—์„œ ๊ฐ€์žฅ ๋งŽ์ด ์‚ฌ์šฉ๋œ ์•ŒํŒŒ๋ฒณ์„ ๋Œ€๋ฌธ์ž๋กœ ์ถœ๋ ฅํ•œ๋‹ค. ๋‹จ, ๊ฐ€์žฅ ๋งŽ์ด ์‚ฌ์šฉ๋œ ์•ŒํŒŒ๋ฒณ์ด ์—ฌ๋Ÿฌ ๊ฐœ ์กด์žฌํ•˜๋Š” ๊ฒฝ์šฐ์—๋Š” ?๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +dictionary๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋“ฑ์žฅํ•˜๋Š” ์•ŒํŒŒ๋ฒณ์˜ ๊ฐœ์ˆ˜๋ฅผ ์„ธ์–ด์ค€๋‹ค. ๊ทธ๋ฆฌ๊ณ  ํ•ด๋‹น ์•ŒํŒŒ๋ฒณ์˜ ๊ฐœ์ˆ˜๊ฐ€ ์ €์žฅ๋œ ์ตœ๋Œ€ ๊ฐœ์ˆ˜๋ณด๋‹ค ๋งŽ์œผ๋ฉด ๊ทธ ์•ŒํŒŒ๋ฒณ๊ณผ ๊ทธ ๊ฐœ์ˆ˜๋ฅผ ์ €์žฅํ•œ๋‹ค. ๋งŒ์•ฝ ์•ŒํŒŒ๋ฒณ์˜ ๊ฐœ์ˆ˜๊ฐ€ ์ €์žฅ๋œ ์ตœ๋Œ€ ๊ฐœ์ˆ˜์™€ ์ •ํ™•ํžˆ ๊ฐ™๋‹ค๋ฉด ๊ฐ€์žฅ ๋งŽ์ด ์‚ฌ์šฉ๋œ ์•ŒํŒŒ๋ฒณ์ด ๊ทธ ์ˆœ๊ฐ„์— ์—ฌ๋Ÿฌ ๊ฐœ ์กด์žฌํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋ฏ€๋กœ ๋ณ€์ˆ˜(`same_chk`)๋ฅผ ํ†ตํ•ด ์ฒดํฌํ•ด์ค€๋‹ค. + +๋งŒ์•ฝ ์ฒดํฌ ๋˜์–ด ์žˆ๋‹ค๋ฉด โ€˜?โ€™๋ฅผ, ์•„๋‹ˆ๋ฉด ์ €์žฅ๋˜์–ด ์žˆ๋Š” ์ตœ๋Œ€ ๊ฐœ์ˆ˜์˜ ์•ŒํŒŒ๋ฒณ์„ ์ถœ๋ ฅํ•œ๋‹ค. 
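+
+참고로 '최대 횟수가 동률인지' 확인하는 부분은 표준 라이브러리 `collections.Counter`의 `most_common`으로도 같은 결과를 얻을 수 있다. 아래는 그런 대안을 간단히 그려 본 스케치이며, 뒤에 나오는 본 풀이 코드와 같은 입출력을 가정한다.
+
+```python
+from collections import Counter
+
+word = input().upper()
+
+# 많이 나온 순서대로 상위 두 개를 뽑아 동률 여부를 확인한다
+top = Counter(word).most_common(2)
+if len(top) > 1 and top[0][1] == top[1][1]:
+    print('?')
+else:
+    print(top[0][0])
+```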
+ +## ์ฝ”๋“œ + +```python +from collections import defaultdict +word = input().upper() + +counter = defaultdict(int) + +max_cnt = 0 +max_alpha = '' +same_chk = False + +for a in word: + counter[a] += 1 + if counter[a] > max_cnt: + max_cnt = counter[a] + max_alpha = a + same_chk = False + elif counter[a] == max_cnt: + same_chk = True + +if same_chk: + print('?') +else: + print(max_alpha) +``` \ No newline at end of file diff --git a/_posts/2024-07-31-swea_5215.markdown b/_posts/2024-07-31-swea_5215.markdown new file mode 100644 index 00000000000..2eaf5f91124 --- /dev/null +++ b/_posts/2024-07-31-swea_5215.markdown @@ -0,0 +1,198 @@ +--- +title: "[SWEA]ํ–„๋ฒ„๊ฑฐ ๋‹ค์ด์–ดํŠธ - 5215 (D3)" +author: kwon +date: 2024-07-31T14:00:00 +0900 +categories: [swea, d3] +tags: [dfs, greedy algorithm] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 16 ์ดˆ | ํž™ ์ •์  ๋ฉ”๋ชจ๋ฆฌ: 256 MB / ์Šคํƒ ๋ฉ”๋ชจ๋ฆฌ 1MB | + +# ๋ฌธ์ œ + + +ํ‰์†Œ ํ–„๋ฒ„๊ฑฐ๋ฅผ ์ข‹์•„ํ•˜๋˜ ๋ฏผ๊ธฐ๋Š” ์ตœ๊ทผ ๋ถ€์ฉ ๋Š˜์–ด๋‚œ ์‚ด ๋•Œ๋ฌธ์— ๊ฑฑ์ •์ด ๋งŽ๋‹ค. + +๊ทธ๋ ‡๋‹ค๊ณ  ํ–„๋ฒ„๊ฑฐ๋ฅผ ํฌ๊ธฐํ•  ์ˆ˜ ์—†์—ˆ๋˜ ๋ฏผ๊ธฐ๋Š” ํ–„๋ฒ„๊ฑฐ์˜ ๋ง›์€ ์ตœ๋Œ€ํ•œ ์œ ์ง€ํ•˜๋ฉด์„œ ์ •ํ•ด์ง„ ์นผ๋กœ๋ฆฌ๋ฅผ ๋„˜์ง€ ์•Š๋Š” ํ–„๋ฒ„๊ฑฐ๋ฅผ ์ฃผ๋ฌธํ•˜์—ฌ ๋จน์œผ๋ ค๊ณ  ํ•œ๋‹ค. + + +๋ฏผ๊ธฐ๊ฐ€ ์ฃผ๋กœ ์ด์šฉํ•˜๋Š” ํ–„๋ฒ„๊ฑฐ ๊ฐ€๊ฒŒ์—์„œ๋Š” ๊ณ ๊ฐ์ด ์›ํ•˜๋Š” ์กฐํ•ฉ์œผ๋กœ ํ–„๋ฒ„๊ฑฐ๋ฅผ ๋งŒ๋“ค์–ด์„œ ์ค€๋‹ค. + +ํ•˜์ง€๋งŒ ์žฌ๋ฃŒ๋Š” ๋ฏธ๋ฆฌ ๋งŒ๋“ค์–ด์„œ ์ค€๋น„ํ•ด๋†“๊ธฐ ๋•Œ๋ฌธ์— ์กฐํ•ฉ์— ๋“ค์–ด๊ฐ€๋Š” ์žฌ๋ฃŒ๋ฅผ ์ž˜๋ผ์„œ ์กฐํ•ฉํ•ด์ฃผ์ง€๋Š” ์•Š๊ณ , ์žฌ๋ฃŒ๋ฅผ ์„ ํƒํ•˜๋ฉด ์ค€๋น„ํ•ด๋†“์€ ์žฌ๋ฃŒ๋ฅผ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉํ•˜์—ฌ ์กฐํ•ฉํ•ด์ค€๋‹ค. + +๋ฏผ๊ธฐ๋Š” ์ด ๊ฐ€๊ฒŒ์—์„œ ์ž์‹ ์ด ๋จน์—ˆ๋˜ ํ–„๋ฒ„๊ฑฐ์˜ ์žฌ๋ฃŒ์— ๋Œ€ํ•œ ๋ง›์„ ์ž์‹ ์˜ ์˜ค๋žœ ๊ฒฝํ—˜์„ ํ†ตํ•ด ์ ์ˆ˜๋ฅผ ๋งค๊ฒจ๋†“์•˜๋‹ค. + +๋ฏผ๊ธฐ์˜ ํ–„๋ฒ„๊ฑฐ ์žฌ๋ฃŒ์— ๋Œ€ํ•œ ์ ์ˆ˜์™€ ๊ฐ€๊ฒŒ์—์„œ ์ œ๊ณตํ•˜๋Š” ์žฌ๋ฃŒ์— ๋Œ€ํ•œ ์นผ๋กœ๋ฆฌ๊ฐ€ ์ฃผ์–ด์กŒ์„ ๋•Œ, + +๋ฏผ๊ธฐ๊ฐ€ ์ข‹์•„ํ•˜๋Š” ํ–„๋ฒ„๊ฑฐ๋ฅผ ๋จน์œผ๋ฉด์„œ๋„ ๋‹ค์ด์–ดํŠธ์— ์„ฑ๊ณตํ•  ์ˆ˜ ์žˆ๋„๋ก ์ •ํ•ด์ง„ ์นผ๋กœ๋ฆฌ ์ดํ•˜์˜ ์กฐํ•ฉ ์ค‘์—์„œ ๋ฏผ๊ธฐ๊ฐ€ ๊ฐ€์žฅ ์„ ํ˜ธํ•˜๋Š” ํ–„๋ฒ„๊ฑฐ๋ฅผ ์กฐํ•ฉํ•ด์ฃผ๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ๋งŒ๋“ค์–ด๋ณด์ž. + +(๋‹จ ์—ฌ๋Ÿฌ ์žฌ๋ฃŒ๋ฅผ ์กฐํ•ฉํ•˜์˜€์„ ํ–„๋ฒ„๊ฑฐ์˜ ์„ ํ˜ธ๋„๋Š” ์กฐํ•ฉ๋œ ์žฌ๋ฃŒ๋“ค์˜ ๋ง›์— ๋Œ€ํ•œ ์ ์ˆ˜์˜ ํ•ฉ์œผ๋กœ ๊ฒฐ์ •๋˜๊ณ , ๊ฐ™์€ ์žฌ๋ฃŒ๋ฅผ ์—ฌ๋Ÿฌ ๋ฒˆ ์‚ฌ์šฉํ•  ์ˆ˜ ์—†์œผ๋ฉฐ, ํ–„๋ฒ„๊ฑฐ์˜ ์กฐํ•ฉ์˜ ์ œํ•œ์€ ์นผ๋กœ๋ฆฌ๋ฅผ ์ œ์™ธํ•˜๊ณ ๋Š” ์—†๋‹ค.) + +# ์ž…๋ ฅ + +์ฒซ ๋ฒˆ์งธ ์ค„์— ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ์ฒซ ๋ฒˆ์งธ ์ค„์—๋Š” ์žฌ๋ฃŒ์˜ ์ˆ˜, ์ œํ•œ ์นผ๋กœ๋ฆฌ๋ฅผ ๋‚˜ํƒ€๋‚ด๋Š” N, L(1 โ‰ค N โ‰ค 20, 1 โ‰ค L โ‰ค 104)๊ฐ€ ๊ณต๋ฐฑ์œผ๋กœ ๊ตฌ๋ถ„๋˜์–ด ์ฃผ์–ด์ง„๋‹ค. + + +๋‹ค์Œ N๊ฐœ์˜ ์ค„์—๋Š” ์žฌ๋ฃŒ์— ๋Œ€ํ•œ ๋ฏผ๊ธฐ์˜ ๋ง›์— ๋Œ€ํ•œ ์ ์ˆ˜์™€ ์นผ๋กœ๋ฆฌ๋ฅผ ๋‚˜ํƒ€๋‚ด๋Š” Ti, Ki(1 โ‰ค $T_i$ โ‰ค 103, 1 โ‰ค Ki โ‰ค 103)๊ฐ€ ๊ณต๋ฐฑ์œผ๋กœ ๊ตฌ๋ถ„๋˜์–ด ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +๊ฐ ์ค„๋งˆ๋‹ค "#T" (T๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค ๋ฒˆํ˜ธ)๋ฅผ ์ถœ๋ ฅํ•œ ๋’ค, ์ฃผ์–ด์ง„ ์ œํ•œ ์นผ๋กœ๋ฆฌ ์ดํ•˜์˜ ์กฐํ•ฉ์ค‘์—์„œ ๊ฐ€์žฅ ๋ง›์— ๋Œ€ํ•œ ์ ์ˆ˜๊ฐ€ ๋†’์€ ํ–„๋ฒ„๊ฑฐ์˜ ์ ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +## DFS๋ฅผ ํ™œ์šฉํ•œ ํ’€์ด + +๊ฐ€์žฅ ๋จผ์ € ๋– ์˜ค๋ฅธ ๋ฐฉ์‹์€ DFS๋ฅผ ํ™œ์šฉํ•œ ๋ฐฉ์‹์ด๋‹ค. +ํ–„๋ฒ„๊ฑฐ ์žฌ๋ฃŒ๋ฅผ ํ•˜๋‚˜์”ฉ ์‚ฌ์šฉํ•˜๋ฉฐ(์žฌ๋ฃŒ ์ˆ˜๋ฅผ ํ•˜๋‚˜์”ฉ ์ค„์—ฌ๊ฐ€๋ฉฐ) ์žฌ๊ท€๋ฅผ ๋ฐ˜๋ณตํ•œ๋‹ค. +์‚ฌ์šฉํ•œ ์žฌ๋ฃŒ์— ๋งž๊ฒŒ ์นผ๋กœ๋ฆฌ์™€ ์ ์ˆ˜๋ฅผ ๋”ํ•ด์„œ ๋‹ค์Œ ์žฌ๊ท€์˜ ๋งค๊ฐœ๋ณ€์ˆ˜๋กœ ๋„˜๊ฒจ์ค€๋‹ค. +๊ทธ๋ฆฌ๊ณ  ๋งค ์‹œํ–‰๋งˆ๋‹ค ์ ์ˆ˜๊ฐ€ ์ตœ๋Œ€์ธ์ง€ ํ™•์ธํ•˜๊ณ  ๊ธฐ๋กํ•ด์ค€๋‹ค. 
+ +ํ•˜์ง€๋งŒ ํ˜„์žฌ ์žฌ๋ฃŒ๊นŒ์ง€์˜ ์นผ๋กœ๋ฆฌ ํ•ฉ์ด ์ œํ•œ ์นผ๋กœ๋ฆฌ๋ณด๋‹ค ๋†’๋‹ค๋ฉด ๋” ์ด์ƒ ํƒ์ƒ‰ํ•  ํ•„์š”๊ฐ€ ์—†์œผ๋ฏ€๋กœ ๋„˜์–ด๊ฐ„๋‹ค. + +### ์ฝ”๋“œ + +```py +test_case = int(input()) + + +# ์ œํ•œ ์นผ๋กœ๋ฆฌ ๋‚ด์—์„œ ์ตœ๋Œ€์˜ ๋ง› +def search_best(hamburgers, sum_cal=0, sum_score=0): + global max_score + max_score = max(max_score, sum_score) + + for i, (score, cal) in enumerate(hamburgers): + if sum_cal + cal > l: + continue + search_best(hamburgers[i + 1:], sum_cal + cal, sum_score + score) + + +for t in range(test_case): + n, l = map(int, input().split()) + + hamburgers = [list(map(int, input().split())) for _ in range(n)] + + max_score = 0 + + search_best(hamburgers) + + print(f"#{t + 1} {max_score}") +``` + +## DP๋ฅผ ํ™œ์šฉํ•œ ํ’€์ด + +์ด ๋ฌธ์ œ๋ฅผ ๋‹ค์‹œ ์ƒ๊ฐํ•ด๋ณด๋ฉด ์žฌ๋ฃŒ๋ฅผ ๊ณ ๋ฅด๊ฑฐ๋‚˜(1) ์•ˆ๊ณ ๋ฅด๋ฉด์„œ(0) ์ฃผ์–ด์ง„ ์นผ๋กœ๋ฆฌ(์šฉ๋Ÿ‰) ๋‚ด์—์„œ ์ตœ๋Œ€์˜ ์ ์ˆ˜(์ด๋“)์„ ์–ป๋Š” ๊ฒƒ์ด ๋ชฉ์ ์ด๋ฏ€๋กœ, 0-1 ๋ฐฐ๋‚ญ ๋ฌธ์ œ์™€ ๋™์ผํ•˜๋‹ค. + +์ด์— ์ด์ „๊นŒ์ง€์˜ ๋ง› ์ ์ˆ˜์˜ ์ตœ๋Œ€์น˜๋ฅผ ํ™œ์šฉํ•˜์—ฌ, ์นผ๋กœ๋ฆฌ ์ œํ•œ `l`์—์„œ ๋‚ด๋ ค๊ฐ€๋ฉฐ ๊ฐ€์žฅ ๋†’์€ ์ ์ˆ˜๋ฅผ ๊ณ„์‚ฐํ•œ๋‹ค. +๋ง๋กœ ํ•˜๋‹ˆ ์–ด๋ ค์šฐ๋‹ˆ ์•„๋ž˜ ์˜ˆ์‹œ๋ฅผ ๋ณด์ž. + +DP๋ฅผ ์œ„ํ•œ ์ ํ™”์‹์€ ๋‹ค์Œ๊ณผ ๊ฐ™๋””. +```py +dp = [0] * (l + 1) +... +dp[j] = max(dp[j], dp[j - cal] + score) +``` +์ด ์ ํ™”์‹์„ ์•„๋ž˜ ์˜ˆ์‹œ ์ž…๋ ฅ์œผ๋กœ ์—ฐ์‚ฐ์„ ์ง„ํ–‰ํ•ด๋ณด์ž. + +```bash +1 +5 1000 +100 200 +300 500 +250 300 +500 1000 +400 400 +``` + +์ฒซ ์ž…๋ ฅ์€ ์ ์ˆ˜ 100, ์นผ๋กœ๋ฆฌ 200์˜ ์žฌ๋ฃŒ์ด๋‹ค. (ํŽธ์˜์ƒ 100 ๋‹จ์œ„๋กœ ์ธ๋ฑ์Šค๋ฅผ ํ‘œ์‹œํ–ˆ์ง€๋งŒ ์‹ค์ œ๋กœ๋Š” 1๋‹จ์œ„์ด๋‹ค.) + + +|score|cal| +|---|---| +|100|200| + +|**$j$** |1000|900|800|700|600|500|400|300|200|100|0| +|:---------:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| +|$j-cal$ |800|700|600|500|400|300|200|100|0|-|-| +|$DP_{before}$|0|0|0|0|0|0|0|0|0|0|0| +|$DP_{next}$|100|100|100|100|100|100|100|100|100|0|0| + +์ ํ™”์‹์— ๋”ฐ๋ผ ํ˜„์žฌ ์ธ๋ฑ์Šค์˜ (dp ๋ฐฐ์—ด ๊ฐ’)๊ณผ (`j - cal` ์ธ๋ฑ์Šค ๊ฐ’ + ํ˜„์žฌ `score` ๊ฐ’) ์ค‘ ํฐ ๊ฐ’์„ ์ทจํ•œ๋‹ค. + +์ฆ‰, ์ง€๊ธˆ๊นŒ์ง€ ์ €์žฅํ•œ ํ˜„์žฌ ์œ„์น˜์˜ ์นผ๋กœ๋ฆฌ `j`์™€ (`score` + `j-cal`) ์นผ๋กœ๋ฆฌ๋กœ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋Š” ๋ง› ์ ์ˆ˜๋ฅผ ๋น„๊ตํ•˜๋Š” ๊ฒƒ์ด๋‹ค. + +ํ•œ ์Šคํƒญ ๋” ์ง„ํ–‰ํ•ด๋ณด์ž. + +|score|cal| +|---|---| +|300|500| + +|**$j$** |1000|900|800|700|600|500|400|300|200|100|0| +|:---------:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| +|$j-cal$ |500|400|300|200|100|0|-|-|-|-|-| +|$DP_{before}$|100|100|100|100|100|100|100|100|100|0|0| +|$DP_{next}$|400|400|400|400|300|300|100|100|100|0|0| + +`j`๊ฐ€ 1000 ~ 700์ผ ๋•Œ๊นŒ์ง€๋Š” ๊ธฐ์กด DP ๋ฐฐ์—ด์˜ ์ธ๋ฑ์Šค 500 ~ 200๊นŒ์ง€์˜ ๊ฐ’๊ณผ ํ•ฉํ•˜์—ฌ ๊ธฐ์กด ๊ฐ’๊ณผ ๋น„๊ตํ•œ๋‹ค. ๋น„๊ต ๊ฒฐ๊ณผ ์ƒˆ๋กœ์šด ๊ฐ’์ด ๋” ํฌ๊ธฐ ๋•Œ๋ฌธ์— 400(100 + 300)์œผ๋กœ ๊ฐฑ์‹ ํ•œ๋‹ค. `j`๊ฐ€ 600 ~ 500์ผ ๋•Œ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ์ง„ํ–‰ํ•˜์ง€๋งŒ ๊ธฐ์กด ๊ฐ’์ด 0์ด๊ธฐ ๋•Œ๋ฌธ์— 300(0 + 300)์œผ๋กœ ๊ฐฑ์‹ ํ•œ๋‹ค. + +์ดํ›„์—๋„ ๊ฐ™์€ ๊ณผ์ •์œผ๋กœ ์ง„ํ–‰ํ•œ๋‹ค. 
+ +|score|cal| +|---|---| +|250|300| + +|**$j$** |1000|900|800|700|600|500|400|300|200|100|0| +|:---------:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| +|$j-cal$ |700|600|500|400|300|200|100|0|-|-|-| +|$DP_{before}$|400|400|400|400|300|300|100|100|100|0|0| +|$DP_{next}$|650|550|550|400|350|350|250|250|100|0|0| + +|score|cal| +|---|---| +|500|1000| + +|**$j$** |1000|900|800|700|600|500|400|300|200|100|0| +|:---------:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| +|$j-cal$ |0|-|-|-|-|-|-|-|-|-|-| +|$DP_{before}$|650|550|550|400|350|350|250|250|100|0|0| +|$DP_{next}$|650|550|550|400|350|350|250|250|100|0|0| + +|score|cal| +|---|---| +|400|400| + +|**$j$** |1000|900|800|700|600|500|400|300|200|100|0| +|:---------:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| +|$j-cal$ |600|500|400|300|200|100|0|-|-|-|-| +|$DP_{before}$|650|550|550|400|350|350|250|250|100|0|0| +|$DP_{next}$|750|750|650|650|500|400|400|250|100|0|0| + +๋งˆ์ง€๋ง‰ ์‹œํ–‰์ด ๋๋‚œ ํ›„ DP ๋ฐฐ์—ด์—์„œ ์ตœ๋Œ€๊ฐ’์„ ์ถœ๋ ฅํ•œ๋‹ค. + +### ์ฝ”๋“œ + +```python +test_case = int(input()) + +for t in range(test_case): + n, l = map(int, input().split()) + + hamburgers = [list(map(int, input().split())) for _ in range(n)] + + dp = [0] * (l + 1) + + for score, cal in hamburgers: + for j in range(l, cal - 1, -1): + dp[j] = max(dp[j], dp[j - cal] + score) + + max_score = max(dp) + + print(f"#{t + 1} {max_score}") +``` \ No newline at end of file diff --git a/_posts/2024-08-06-swea_4012.markdown b/_posts/2024-08-06-swea_4012.markdown new file mode 100644 index 00000000000..9da40631e21 --- /dev/null +++ b/_posts/2024-08-06-swea_4012.markdown @@ -0,0 +1,144 @@ +--- +title: "[SWEA]์š”๋ฆฌ์‚ฌ - 4012 (๋ชจ์˜ ์—ญ๋Ÿ‰ ํ…Œ์ŠคํŠธ)" +author: kwon +date: 2024-08-06T14:00:00 +0900 +categories: [swea, ๋ชจ์˜์—ญ๋Ÿ‰] +tags: [combinatorics, backtracking] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 10 ์ดˆ | ํž™ ์ •์  ๋ฉ”๋ชจ๋ฆฌ: 256 MB / ์Šคํƒ ๋ฉ”๋ชจ๋ฆฌ 1MB | + +# ๋ฌธ์ œ + + +๋‘ ๋ช…์˜ ์†๋‹˜์—๊ฒŒ ์Œ์‹์„ ์ œ๊ณตํ•˜๋ ค๊ณ  ํ•œ๋‹ค. + +๋‘ ๋ช…์˜ ์†๋‹˜์€ ์‹์„ฑ์ด ๋น„์Šทํ•˜๊ธฐ ๋•Œ๋ฌธ์—, ์ตœ๋Œ€ํ•œ ๋น„์Šทํ•œ ๋ง›์˜ ์Œ์‹์„ ๋งŒ๋“ค์–ด ๋‚ด์•ผ ํ•œ๋‹ค. + +N๊ฐœ์˜ ์‹์žฌ๋ฃŒ๊ฐ€ ์žˆ๋‹ค. + +์‹์žฌ๋ฃŒ๋“ค์„ ๊ฐ๊ฐ N / 2๊ฐœ์”ฉ ๋‚˜๋ˆ„์–ด ๋‘ ๊ฐœ์˜ ์š”๋ฆฌ๋ฅผ ํ•˜๋ ค๊ณ  ํ•œ๋‹ค. (N์€ ์ง์ˆ˜์ด๋‹ค.) + +์ด๋•Œ, ๊ฐ๊ฐ์˜ ์Œ์‹์„ A์Œ์‹, B์Œ์‹์ด๋ผ๊ณ  ํ•˜์ž. + +๋น„์Šทํ•œ ๋ง›์˜ ์Œ์‹์„ ๋งŒ๋“ค๊ธฐ ์œ„ํ•ด์„œ๋Š” A์Œ์‹๊ณผ B์Œ์‹์˜ ๋ง›์˜ ์ฐจ์ด๊ฐ€ ์ตœ์†Œ๊ฐ€ ๋˜๋„๋ก ์žฌ๋ฃŒ๋ฅผ ๋ฐฐ๋ถ„ํ•ด์•ผ ํ•œ๋‹ค. + +์Œ์‹์˜ ๋ง›์€ ์Œ์‹์„ ๊ตฌ์„ฑํ•˜๋Š” ์‹์žฌ๋ฃŒ๋“ค์˜ ์กฐํ•ฉ์— ๋”ฐ๋ผ ๋‹ค๋ฅด๊ฒŒ ๋œ๋‹ค. + + + +์‹์žฌ๋ฃŒ i๋Š” ์‹์žฌ๋ฃŒ j์™€ ๊ฐ™์ด ์š”๋ฆฌํ•˜๊ฒŒ ๋˜๋ฉด ๊ถํ•ฉ์ด ์ž˜ ๋งž์•„ ์‹œ๋„ˆ์ง€ Sij๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. (1 โ‰ค i โ‰ค N, 1 โ‰ค j โ‰ค N, i โ‰  j) + +๊ฐ ์Œ์‹์˜ ๋ง›์€ ์Œ์‹์„ ๊ตฌ์„ฑํ•˜๋Š” ์‹์žฌ๋ฃŒ๋“ค๋กœ๋ถ€ํ„ฐ ๋ฐœ์ƒํ•˜๋Š” ์‹œ๋„ˆ์ง€ Sij๋“ค์˜ ํ•ฉ์ด๋‹ค. + + + +์‹์žฌ๋ฃŒ i๋ฅผ ์‹์žฌ๋ฃŒ j์™€ ๊ฐ™์ด ์š”๋ฆฌํ•˜๊ฒŒ ๋˜๋ฉด ๋ฐœ์ƒํ•˜๋Š” ์‹œ๋„ˆ์ง€ Sij์˜ ์ •๋ณด๊ฐ€ ์ฃผ์–ด์ง€๊ณ , ๊ฐ€์ง€๊ณ  ์žˆ๋Š” ์‹์žฌ๋ฃŒ๋ฅผ ์ด์šฉํ•ด A์Œ์‹๊ณผ B์Œ์‹์„ ๋งŒ๋“ค ๋•Œ, ๋‘ ์Œ์‹ ๊ฐ„์˜ ๋ง›์˜ ์ฐจ์ด๊ฐ€ ์ตœ์†Œ๊ฐ€ ๋˜๋Š” ๊ฒฝ์šฐ๋ฅผ ์ฐพ๊ณ  ๊ทธ ์ตœ์†Ÿ๊ฐ’์„ ์ •๋‹ต์œผ๋กœ ์ถœ๋ ฅํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜๋ผ. + + +์ž์„ธํ•œ ์˜ˆ์‹œ๋Š” [์—ฌ๊ธฐ๋กœ](https://swexpertacademy.com/main/talk/solvingClub/problemView.do?solveclubId=AZC_w6Z6yygDFAQW&contestProbId=AWIeUtVakTMDFAVH&probBoxId=AZDJUP6q-fgDFAVs&type=PROBLEM&problemBoxTitle=2d_recommend&problemBoxCnt=2&&&&&&). + +## ์ œ์•ฝ ์‚ฌํ•ญ + +1. 
์‹œ๊ฐ„ ์ œํ•œ : ์ตœ๋Œ€ 50๊ฐœ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๋ฅผ ๋ชจ๋‘ ํ†ต๊ณผํ•˜๋Š” ๋ฐ C / C++ / Java ๋ชจ๋‘ 3์ดˆ + +2. ์‹์žฌ๋ฃŒ์˜ ์ˆ˜ N์€ 4์ด์ƒ 16์ดํ•˜์˜ ์ง์ˆ˜์ด๋‹ค. $(4 โ‰ค N โ‰ค 16)$ + +3. ์‹œ๋„ˆ์ง€ Sij๋Š” 1์ด์ƒ 20,000์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. $(1 โ‰ค Sij โ‰ค 20,000, i โ‰  j)$ + +4. i์™€ j๊ฐ€ ์„œ๋กœ ๊ฐ™์€ ๊ฒฝ์šฐ์˜ Sij๊ฐ’์€ ์ •์˜๋˜์ง€ ์•Š๋Š”๋‹ค. ์ž…๋ ฅ์—์„œ๋Š” 0์œผ๋กœ ์ฃผ์–ด์ง„๋‹ค. + +# ์ž…๋ ฅ + +์ž…๋ ฅ์˜ ๋งจ ์ฒซ ์ค„์—๋Š” ์ด ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง€๊ณ , + +๊ทธ ๋‹ค์Œ ์ค„๋ถ€ํ„ฐ T๊ฐœ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ์ฒซ ๋ฒˆ์งธ ์ค„์—๋Š” ์‹์žฌ๋ฃŒ์˜ ์ˆ˜ N์ด ์ฃผ์–ด์ง„๋‹ค. + +๋‹ค์Œ N๊ฐœ์˜ ์ค„์—๋Š” N * N๊ฐœ์˜ ์‹œ๋„ˆ์ง€ Sij๊ฐ’๋“ค์ด ์ฃผ์–ด์ง„๋‹ค. i์™€ j๊ฐ€ ์„œ๋กœ ๊ฐ™์€ ๊ฒฝ์šฐ๋Š” 0์œผ๋กœ ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค ๊ฐœ์ˆ˜๋งŒํผ T๊ฐœ์˜ ์ค„์— ๊ฐ๊ฐ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•œ ๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. + +๊ฐ ์ค„์€ "#t"๋กœ ์‹œ์ž‘ํ•˜๊ณ  ๊ณต๋ฐฑ์„ ํ•˜๋‚˜ ๋‘” ๋‹ค์Œ ์ •๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. (t ๋Š” 1๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๋ฒˆํ˜ธ์ด๋‹ค.) + +์ •๋‹ต์€ ๋‘ ์Œ์‹ ๊ฐ„์˜ ๋ง›์˜ ์ฐจ์ด๊ฐ€ ์ตœ์†Œ๊ฐ€ ๋˜๋„๋ก A์Œ์‹๊ณผ B์Œ์‹์„ ๋งŒ๋“ค์—ˆ์„ ๋•Œ ๊ทธ ์ฐจ์ด ๊ฐ’์ด๋‹ค. + +# ํ’€์ด + +์ฃผ์–ด์ง„ ์žฌ๋ฃŒ์˜ ์ ˆ๋ฐ˜์”ฉ ์‚ฌ์šฉํ•˜์—ฌ ์š”๋ฆฌ๋ฅผ ๋งŒ๋“ค์–ด ๋‘ ์š”๋ฆฌ์˜ ๋ง› ์ฐจ์ด๊ฐ€ ์ตœ์†Œ๋กœ ๋˜๋„๋ก ํ•˜๋Š” ๋ฌธ์ œ์ด๋‹ค. + +์ด ๋Œธ ์žฌ๋ฃŒ์˜ ์ ˆ๋ฐ˜์„ ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ์„ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์กฐํ•ฉ์œผ๋กœ ๋‚˜ํƒ€๋‚ผ ์ˆ˜ ์žˆ๋‹ค. + +$$ +{N}\choose{N//2} +$$ + +๋‘ ์š”๋ฆฌ์— ๋Œ€ํ•œ ์žฌ๋ฃŒ๋ฅผ ๋ชจ๋‘ ๊ตฌํ•ด์•ผ ํ•˜๋‚˜? ํ•˜๊ณ  ์ƒ๊ฐํ•  ์ˆ˜ ์žˆ์ง€๋งŒ, ํ•œ ์š”๋ฆฌ์˜ ์žฌ๋ฃŒ๊ฐ€ ๊ฒฐ์ •๋˜๋ฉด ๋‚˜๋จธ์ง€ ํ•˜๋‚˜์˜ ์žฌ๋ฃŒ๋Š” ๊ฒฐ์ •๋˜๊ธฐ ๋•Œ๋ฌธ์— ๊ตณ์ด ๊ตฌํ•  ํ•„์š”๋Š” ์—†๋‹ค. + +๊ทธ๋ž˜์„œ `range(1, n)`์— ๋Œ€ํ•ด `n // 2` ๊ฐœ์˜ ์›์†Œ๋ฅผ ๊ฐ€์ง€๋Š” ์กฐํ•ฉ์„ ๊ตฌํ•˜๋„๋ก `search_recipe`๋ฅผ ์ž‘์„ฑํ•˜์˜€๋‹ค. ์ดํ›„ ์ „์ฒด ์ง‘ํ•ฉ์— ๋Œ€ํ•ด ์ฐจ์ง‘ํ•ฉํ•˜์—ฌ ๋ฐ˜๋Œ€์ชฝ ์กฐํ•ฉ์„ ๊ตฌํ•œ๋‹ค. + +```py +... + comb2 = list(index_set - set(comb)) +... +``` + +๊ตฌํ•ด์ง„ ์กฐํ•ฉ์œผ๋กœ ๋ง›์„ ๊ณ„์‚ฐํ•˜๊ณ  ์ฐจ์ด์˜ ์ตœ์†Ÿ๊ฐ’์„ ๊ตฌํ•˜๋ฉด ๋œ๋‹ค. + +```py +... + for i_idx, (i1, i2) in enumerate(zip(comb, comb2)): + for j1, j2 in zip(comb[i_idx + 1:], comb2[i_idx + 1:]): + food1 += recipe[i1][j1] + recipe[j1][i1] + food2 += recipe[i2][j2] + recipe[j2][i2] + min_diff = min(min_diff, abs(food1 - food2)) +... +``` + +๋งŒ๋“ค์–ด์ค€ ์กฐํ•ฉ ๋ฐฐ์—ด์˜ ๋งจ ์•ž์— `[0]`์„ ๋ถ™์—ฌ์ฃผ๋Š” ์ด์œ ๋Š” ์ฃผ์–ด์ง„ ์žฌ๋ฃŒ์˜ ๋ฒˆํ˜ธ๊ฐ€ 1๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์ธ๋ฑ์Šค์™€ ๋งž์ถฐ์ฃผ๊ธฐ ์œ„ํ•จ์ด๋‹ค. 
+ +## ์ฝ”๋“œ + +```py +test_case = int(input()) + +def search_recipe(index_list, n): + if n == 1 : + return [[i] for i in index_list] + result = [] + for i in range(len(index_list) - 1): + for j in search_recipe(index_list[i+1:], n - 1): + result.append([index_list[i]] + j) + + return result + + +for t in range(test_case): + n = int(input()) + min_diff = float('inf') + + recipe = [list(map(int, input().split())) for _ in range(n)] + + index_set = set(range(n)) + + comb_list = [[0] + c for c in search_recipe(list(range(1, n)), n // 2 - 1)] + + for comb in comb_list: + comb2 = list(index_set - set(comb)) + food1, food2 = 0, 0 + + for i_idx, (i1, i2) in enumerate(zip(comb, comb2)): + for j1, j2 in zip(comb[i_idx + 1:], comb2[i_idx + 1:]): + food1 += recipe[i1][j1] + recipe[j1][i1] + food2 += recipe[i2][j2] + recipe[j2][i2] + min_diff = min(min_diff, abs(food1 - food2)) + + print(f"#{t + 1} {min_diff}") +``` \ No newline at end of file diff --git a/_posts/2024-08-09-swea_4008.markdown b/_posts/2024-08-09-swea_4008.markdown new file mode 100644 index 00000000000..cae75760042 --- /dev/null +++ b/_posts/2024-08-09-swea_4008.markdown @@ -0,0 +1,136 @@ +--- +title: "[SWEA]์ˆซ์ž ๋งŒ๋“ค๊ธฐ - 4008 (๋ชจ์˜ ์—ญ๋Ÿ‰ ํ…Œ์ŠคํŠธ)" +author: kwon +date: 2024-08-09T14:00:00 +0900 +categories: [swea, ๋ชจ์˜์—ญ๋Ÿ‰] +tags: [dfs] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 10 ์ดˆ | ํž™ ์ •์  ๋ฉ”๋ชจ๋ฆฌ: 256 MB / ์Šคํƒ ๋ฉ”๋ชจ๋ฆฌ 1MB | + +# ๋ฌธ์ œ + + +์„ ํ‘œ๋Š” ๊ฒŒ์ž„์„ ํ†ตํ•ด ์‚ฌ์น™ ์—ฐ์‚ฐ์„ ๊ณต๋ถ€ํ•˜๊ณ  ์žˆ๋‹ค. + +N๊ฐœ์˜ ์ˆซ์ž๊ฐ€ ์ ํ˜€ ์žˆ๋Š” ๊ฒŒ์ž„ ํŒ์ด ์žˆ๊ณ , +, -, x, / ์˜ ์—ฐ์‚ฐ์ž ์นด๋“œ๋ฅผ ์ˆซ์ž ์‚ฌ์ด์— ๋ผ์›Œ ๋„ฃ์–ด ๋‹ค์–‘ํ•œ ๊ฒฐ๊ณผ ๊ฐ’์„ ๊ตฌํ•ด๋ณด๊ธฐ๋กœ ํ–ˆ๋‹ค. + +์ˆ˜์‹์„ ๊ณ„์‚ฐํ•  ๋•Œ ์—ฐ์‚ฐ์ž์˜ ์šฐ์„  ์ˆœ์œ„๋Š” ๊ณ ๋ คํ•˜์ง€ ์•Š๊ณ  ์™ผ์ชฝ์—์„œ ์˜ค๋ฅธ์ชฝ์œผ๋กœ ์ฐจ๋ก€๋Œ€๋กœ ๊ณ„์‚ฐํ•œ๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด 1, 2, 3 ์ด ์ ํžŒ ๊ฒŒ์ž„ ํŒ์— +์™€ x๋ฅผ ๋„ฃ์–ด 1 + 2 * 3์„ ๋งŒ๋“ค๋ฉด 1 + 2๋ฅผ ๋จผ์ € ๊ณ„์‚ฐํ•˜๊ณ  ๊ทธ ๋’ค์— * ๋ฅผ ๊ณ„์‚ฐํ•œ๋‹ค. + +์ฆ‰ 1+2*3์˜ ๊ฒฐ๊ณผ๋Š” 9์ด๋‹ค. + + +์ฃผ์–ด์ง„ ์—ฐ์‚ฐ์ž ์นด๋“œ๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ˆ˜์‹์„ ๊ณ„์‚ฐํ–ˆ์„ ๋•Œ ๊ทธ ๊ฒฐ๊ณผ๊ฐ€ ์ตœ๋Œ€๊ฐ€ ๋˜๋Š” ์ˆ˜์‹๊ณผ ์ตœ์†Œ๊ฐ€ ๋˜๋Š” ์ˆ˜์‹์„ ์ฐพ๊ณ , ๋‘ ๊ฐ’์˜ ์ฐจ์ด๋ฅผ ์ถœ๋ ฅํ•˜์‹œ์˜ค. + + +์ž์„ธํ•œ ์˜ˆ์‹œ๋Š” [์—ฌ๊ธฐ๋กœ](https://swexpertacademy.com/main/talk/solvingClub/problemView.do?solveclubId=AZC_w6Z6yygDFAQW&contestProbId=AWIeRZV6kBUDFAVH&probBoxId=AZC_w6Z6yykDFAQW&type=PROBLEM&problemBoxTitle=1w_homework&problemBoxCnt=5) + +## ์ œ์•ฝ ์‚ฌํ•ญ + +1. ์‹œ๊ฐ„ ์ œํ•œ : ์ตœ๋Œ€ 50 ๊ฐœ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๋ฅผ ๋ชจ๋‘ ํ†ต๊ณผํ•˜๋Š” ๋ฐ C / C++ / Java ๋ชจ๋‘ 3 ์ดˆ + +2. ๊ฒŒ์ž„ ํŒ์— ์ ํžŒ ์ˆซ์ž์˜ ๊ฐœ์ˆ˜ N ์€ 3 ์ด์ƒ 12 ์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. ( 3 โ‰ค N โ‰ค 12 ) + +3. ์—ฐ์‚ฐ์ž ์นด๋“œ ๊ฐœ์ˆ˜์˜ ์ด ํ•ฉ์€ ํ•ญ์ƒ N - 1 ์ด๋‹ค. + +4. ๊ฒŒ์ž„ ํŒ์— ์ ํžŒ ์ˆซ์ž๋Š” 1 ์ด์ƒ 9 ์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. + +5. ์ˆ˜์‹์„ ์™„์„ฑํ•  ๋•Œ ๊ฐ ์—ฐ์‚ฐ์ž ์นด๋“œ๋ฅผ ๋ชจ๋‘ ์‚ฌ์šฉํ•ด์•ผ ํ•œ๋‹ค.. + +6. ์ˆซ์ž์™€ ์ˆซ์ž ์‚ฌ์ด์—๋Š” ์—ฐ์‚ฐ์ž๊ฐ€ 1 ๊ฐœ๋งŒ ๋“ค์–ด๊ฐ€์•ผ ํ•œ๋‹ค. + +7. ์™„์„ฑ๋œ ์ˆ˜์‹์„ ๊ณ„์‚ฐํ•  ๋•Œ ์—ฐ์‚ฐ์ž์˜ ์šฐ์„  ์ˆœ์œ„๋Š” ๊ณ ๋ คํ•˜์ง€ ์•Š๊ณ , ์™ผ์ชฝ์—์„œ ์˜ค๋ฅธ์ชฝ์œผ๋กœ ์ฐจ๋ก€๋Œ€๋กœ ๊ณ„์‚ฐํ•œ๋‹ค. + +8. ๋‚˜๋ˆ—์…ˆ์„ ๊ณ„์‚ฐ ํ•  ๋•Œ ์†Œ์ˆ˜์  ์ดํ•˜๋Š” ๋ฒ„๋ฆฐ๋‹ค. + +9. ์ž…๋ ฅ์œผ๋กœ ์ฃผ์–ด์ง€๋Š” ์ˆซ์ž์˜ ์ˆœ์„œ๋Š” ๋ณ€๊ฒฝํ•  ์ˆ˜ ์—†๋‹ค. + +10. ์—ฐ์‚ฐ ์ค‘์˜ ๊ฐ’์€ -100,000,000 ์ด์ƒ 100,000,000 ์ดํ•˜์ž„์ด ๋ณด์žฅ๋œ๋‹ค. 
+ +# ์ž…๋ ฅ + +์ž…๋ ฅ์˜ ๋งจ ์ฒซ ์ค„์—๋Š” ์ด ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T ๊ฐ€ ์ฃผ์–ด์ง€๊ณ , + +๊ทธ ๋‹ค์Œ ์ค„๋ถ€ํ„ฐ T ๊ฐœ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ์ฒซ ๋ฒˆ์งธ ์ค„์—๋Š” ์ˆซ์ž์˜ ๊ฐœ์ˆ˜ N ์ด ์ฃผ์–ด์ง„๋‹ค. + +๋‹ค์Œ ์ค„์—๋Š” '+', '-', '*', '/' ์ˆœ์„œ๋Œ€๋กœ ์—ฐ์‚ฐ์ž ์นด๋“œ์˜ ๊ฐœ์ˆ˜๊ฐ€ ๊ณต๋ฐฑ์„ ์‚ฌ์ด์— ๋‘๊ณ  ์ฃผ์–ด์ง„๋‹ค. + +๋‹ค์Œ ์ค„์—๋Š” ์ˆ˜์‹์— ๋“ค์–ด๊ฐ€๋Š” N ๊ฐœ์˜ ์ˆซ์ž๊ฐ€ ์ˆœ์„œ๋Œ€๋กœ ๊ณต๋ฐฑ์„ ์‚ฌ์ด์— ๋‘๊ณ  ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค ๊ฐœ์ˆ˜๋งŒํผ T ๊ฐœ์˜ ์ค„์— ๊ฐ๊ฐ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์— ๋Œ€ํ•œ ๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. + +๊ฐ ์ค„์€ "#t" ๋กœ ์‹œ์ž‘ํ•˜๊ณ  ๊ณต๋ฐฑ์„ ํ•˜๋‚˜ ๋‘” ๋‹ค์Œ ์ •๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. ( t ๋Š” 1 ๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๋ฒˆํ˜ธ์ด๋‹ค. ) + +์ •๋‹ต์€ ์—ฐ์‚ฐ์ž ์นด๋“œ๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋Š” ์ˆ˜์‹์œผ๋กœ ์–ป์€ ๊ฒฐ๊ณผ๊ฐ’ ์ค‘ ์ตœ๋Œ“๊ฐ’๊ณผ ์ตœ์†Ÿ๊ฐ’์˜ ์ฐจ์ด์ด๋‹ค. + +# ํ’€์ด + +์ฃผ์–ด์ง„ ์ˆซ์ž ์‚ฌ์ด์— ์ฃผ์–ด์ง„ ์—ฐ์‚ฐ์ž๋ฅผ ๋„ฃ์–ด ์ˆ˜๋ฅผ ๊ณ„์‚ฐํ•˜๊ณ , ๊ทธ ์ˆ˜์˜ ์ตœ๋Œ€์™€ ์ตœ์†Œ์˜ ์ฐจ์ด๋ฅผ ์ถœ๋ ฅํ•˜๋Š” ๋ฌธ์ œ์ด๋‹ค. + +๋ฌด์‹ฌ์ฝ” ํ’€์ด๋ฅผ ์ƒ๊ฐํ•˜๋ฉด ๋ฐ›์€ ์—ฐ์‚ฐ์ž๋ฅผ ๋ชจ๋“  ์œ„์น˜์— ๋„ฃ์–ด๊ฐ€๋ฉด์„œ ๊ณ„์‚ฐํ•˜๋ฉด ๋˜๋Š” ๊ฒƒ์ด ์•„๋‹Œ๊ฐ€ ํ•  ์ˆ˜ ์žˆ์ง€๋งŒ, ์ด ๊ฒฝ์šฐ ์ค‘๋ณต ๊ณ„์‚ฐ์ด ๋นˆ๋ฒˆํ•˜๊ฒŒ ๋ฐœ์ƒํ•  ์ˆ˜ ์žˆ๋‹ค. + +์˜ˆ๋ฅผ ๋“ค์–ด + + + ๋กœ ์—ฐ์‚ฐ์ž๊ฐ€ ์ฃผ์–ด์ง„๋‹ค๋ฉด ์–ด๋–ค ๊ฒฝ์šฐ์ด๋“  ๊ฐ™์€ ๊ฐ’์ด ๋‚˜์˜ฌ ๊ฒƒ์ด๋‹ค. ํ•˜์ง€๋งŒ ์—ฐ์‚ฐ์ž๋ฅผ ๋ชจ๋“  ์œ„์น˜์— ๋„ฃ์–ด๊ฐ€๋ฉด์„œ ๊ณ„์‚ฐํ•  ๊ฒฝ์šฐ $3 * 2 * 1 = 6$ ๋ฒˆ์˜ ์ค‘๋ณต ๊ณ„์‚ฐ์„ ํ•˜๊ฒŒ ๋œ๋‹ค. + +์ด๋Ÿฐ ๊ฒฝ์šฐ๋ฅผ ๋ฐฉ์ง€ํ•˜๊ธฐ ์œ„ํ•ด ์—ฐ์‚ฐ์ž์˜ ์ˆ˜๋ฅผ ์„ธ๋Š” ๋”•์…”๋„ˆ๋ฆฌ๋ฅผ ๋งŒ๋“ค๊ณ , ๋ช‡ ๋ฒˆ ์‚ฌ์šฉํ–ˆ๋Š”์ง€ ํ™•์ธํ•˜์˜€๋‹ค. ์ด๋ ‡๊ฒŒ ํ•˜๋ฉด ๊ฐ ์—ฐ์‚ฐ์ž๋งˆ๋‹ค ๋‹ค๋ฅธ ๊ฒƒ์œผ๋กœ ์ƒ๊ฐํ•˜์ง€ ์•Š๊ธฐ ๋•Œ๋ฌธ์— ์œ„์—์„œ ์–ธ๊ธ‰ํ•œ ์ค‘๋ณต ์—ฐ์‚ฐ์„ ์—†์•จ ์ˆ˜ ์žˆ๋‹ค. + +## ์ฝ”๋“œ + +```py +# ๊ณ„์‚ฐ + +def calculate(num1, num2, operator): + + if operator == '+': + num1 += num2 + elif operator == '-': + num1 -= num2 + elif operator == '*': + num1 *= num2 + elif operator == '/': + num1 = int(num1 / num2) + return num1 + +# ์ˆ˜์‹ ์™„์„ฑ +def search_expression(i, result): + if i == n: + global max_num, min_num + max_num = max(max_num, result) + min_num = min(min_num, result) + return + + for operator in operators: + if operator_dict[operator] > 0: + operator_dict[operator] -= 1 + search_expression(i + 1, calculate(result, nums[i+1], operator)) + operator_dict[operator] += 1 + + + +test_case = int(input()) + +for t in range(test_case): + n = int(input()) - 1 + operators = ['+', '-', '*', '/'] + operator_dict = {operator: cnt for operator, cnt in zip(operators, map(int, input().split()))} + + nums = list(map(int, input().split())) + + max_num = float('-inf') + min_num = float('inf') + result_dict = {} + visited = [] + + search_expression(0, nums[0]) + + print(f"#{t + 1} {max_num - min_num}") +``` \ No newline at end of file diff --git a/_posts/2024-08-10-swea_2115.markdown b/_posts/2024-08-10-swea_2115.markdown new file mode 100644 index 00000000000..b2a66a7d8d7 --- /dev/null +++ b/_posts/2024-08-10-swea_2115.markdown @@ -0,0 +1,189 @@ +--- +title: "[SWEA]๋ฒŒ๊ฟ€ ์ฑ„์ทจ - 2115 (๋ชจ์˜ ์—ญ๋Ÿ‰ ํ…Œ์ŠคํŠธ)" +author: kwon +date: 2024-08-10T14:00:00 +0900 +categories: [swea, ๋ชจ์˜์—ญ๋Ÿ‰] +tags: [dfs, subset, dynamic programing] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 6 ์ดˆ | ํž™ ์ •์  ๋ฉ”๋ชจ๋ฆฌ: 256 MB / ์Šคํƒ ๋ฉ”๋ชจ๋ฆฌ 1MB | + +# ๋ฌธ์ œ + + +N*N ๊ฐœ์˜ ๋ฒŒํ†ต์ด ์ •์‚ฌ๊ฐํ˜• ๋ชจ์–‘์œผ๋กœ ๋ฐฐ์น˜๋˜์–ด ์žˆ๋‹ค. 
+ +๊ฐ ์นธ์˜ ์ˆซ์ž๋Š” ๊ฐ๊ฐ์˜ ๋ฒŒํ†ต์— ์žˆ๋Š” ๊ฟ€์˜ ์–‘์„ ๋‚˜ํƒ€๋‚ด๋ฉฐ, ๊ฟ€์˜ ์–‘์€ ์„œ๋กœ ๋‹ค๋ฅผ ์ˆ˜ ์žˆ๋‹ค. + +๊ฐ ๋ฒŒํ†ต์— ์žˆ๋Š” ๊ฟ€์˜ ์–‘์ด ์ฃผ์–ด์กŒ์„ ๋•Œ, ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ณผ์ •์œผ๋กœ ๋ฒŒ๊ฟ€์„ ์ฑ„์ทจํ•˜์—ฌ ์ตœ๋Œ€ํ•œ ๋งŽ์€ ์ˆ˜์ต์„ ์–ป์œผ๋ ค๊ณ  ํ•œ๋‹ค. + +1. ๋‘ ๋ช…์˜ ์ผ๊พผ์ด ์žˆ๋‹ค. ๊ฟ€์„ ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋Š” ๋ฒŒํ†ต์˜ ์ˆ˜ M์ด ์ฃผ์–ด์งˆ ๋•Œ, + ๊ฐ๊ฐ์˜ ์ผ๊พผ์€ ๊ฐ€๋กœ๋กœ ์—ฐ์†๋˜๋„๋ก M๊ฐœ์˜ ๋ฒŒํ†ต์„ ์„ ํƒํ•˜๊ณ , ์„ ํƒํ•œ ๋ฒŒํ†ต์—์„œ ๊ฟ€์„ ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋‹ค. + ๋‹จ, ๋‘ ๋ช…์˜ ์ผ๊พผ์ด ์„ ํƒํ•œ ๋ฒŒํ†ต์€ ์„œ๋กœ ๊ฒน์น˜๋ฉด ์•ˆ ๋œ๋‹ค. + +2. ๋‘ ๋ช…์˜ ์ผ๊พผ์€ ์„ ํƒํ•œ ๋ฒŒํ†ต์—์„œ ๊ฟ€์„ ์ฑ„์ทจํ•˜์—ฌ ์šฉ๊ธฐ์— ๋‹ด์•„์•ผ ํ•œ๋‹ค. + ๋‹จ, ์„œ๋กœ ๋‹ค๋ฅธ ๋ฒŒํ†ต์—์„œ ์ฑ„์ทจํ•œ ๊ฟ€์ด ์„ž์ด๊ฒŒ ๋˜๋ฉด ์ƒํ’ˆ๊ฐ€์น˜๊ฐ€ ๋–จ์ด์ง€๊ฒŒ ๋˜๋ฏ€๋กœ, ํ•˜๋‚˜์˜ ๋ฒŒํ†ต์—์„œ ์ฑ„์ทจํ•œ ๊ฟ€์€ ํ•˜๋‚˜์˜ ์šฉ๊ธฐ์— ๋‹ด์•„์•ผ ํ•œ๋‹ค. + ํ•˜๋‚˜์˜ ๋ฒŒํ†ต์—์„œ ๊ฟ€์„ ์ฑ„์ทจํ•  ๋•Œ, ์ผ๋ถ€๋ถ„๋งŒ ์ฑ„์ทจํ•  ์ˆ˜ ์—†๊ณ  ๋ฒŒํ†ต์— ์žˆ๋Š” ๋ชจ๋“  ๊ฟ€์„ ํ•œ๋ฒˆ์— ์ฑ„์ทจํ•ด์•ผ ํ•œ๋‹ค. + ๋‘ ์ผ๊พผ์ด ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋Š” ๊ฟ€์˜ ์ตœ๋Œ€ ์–‘์€ C ์ด๋‹ค. + +3. ์ฑ„์ทจํ•œ ๊ฟ€์€ ์‹œ์žฅ์—์„œ ํŒ”๋ฆฌ๊ฒŒ ๋œ๋‹ค. ์ด๋•Œ ํ•˜๋‚˜์˜ ์šฉ๊ธฐ์— ์žˆ๋Š” ๊ฟ€์˜ ์–‘์ด ๋งŽ์„์ˆ˜๋ก ์ƒํ’ˆ๊ฐ€์น˜๊ฐ€ ๋†’์•„, ๊ฐ ์šฉ๊ธฐ์— ์žˆ๋Š” ๊ฟ€์˜ ์–‘์˜ ์ œ๊ณฑ๋งŒํผ์˜ ์ˆ˜์ต์ด ์ƒ๊ธด๋‹ค. + + +๋ฒŒํ†ต๋“ค์˜ ํฌ๊ธฐ N๊ณผ ๋ฒŒํ†ต์— ์žˆ๋Š” ๊ฟ€์˜ ์–‘์— ๋Œ€ํ•œ ์ •๋ณด, ์„ ํƒํ•  ์ˆ˜ ์žˆ๋Š” ๋ฒŒํ†ต์˜ ๊ฐœ์ˆ˜ M, ๊ฟ€์„ ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋Š” ์ตœ๋Œ€ ์–‘ C๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +์ด๋•Œ ๋‘ ์ผ๊พผ์ด ๊ฟ€์„ ์ฑ„์ทจํ•˜์—ฌ ์–ป์„ ์ˆ˜ ์žˆ๋Š” ์ˆ˜์ต์˜ ํ•ฉ์ด ์ตœ๋Œ€๊ฐ€ ๋˜๋Š” ๊ฒฝ์šฐ๋ฅผ ์ฐพ๊ณ , ๊ทธ ๋•Œ์˜ ์ตœ๋Œ€ ์ˆ˜์ต์„ ์ถœ๋ ฅํ•˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ์ž‘์„ฑํ•˜๋ผ. + +## ์ œ์•ฝ ์‚ฌํ•ญ + +1. ์‹œ๊ฐ„์ œํ•œ : ์ตœ๋Œ€ 50๊ฐœ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๋ฅผ ๋ชจ๋‘ ํ†ต๊ณผํ•˜๋Š”๋ฐ, C/C++/Java ๋ชจ๋‘ 3์ดˆ. + +2. ๋ฒŒํ†ต๋“ค์˜ ํฌ๊ธฐ N์€ 3 ์ด์ƒ 10 ์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. (3 โ‰ค N โ‰ค 10) + +3. ์„ ํƒํ•  ์ˆ˜ ์žˆ๋Š” ๋ฒŒํ†ต์˜ ๊ฐœ์ˆ˜ M์€ 1 ์ด์ƒ 5 ์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. (1 โ‰ค M โ‰ค 5) + +4. ์„ ํƒํ•  ์ˆ˜ ์žˆ๋Š” ๋ฒŒํ†ต์˜ ๊ฐœ์ˆ˜ M์€ ๋ฐ˜๋“œ์‹œ N ์ดํ•˜๋กœ๋งŒ ์ฃผ์–ด์ง„๋‹ค. + +5. ๊ฟ€์„ ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋Š” ์ตœ๋Œ€ ์–‘ C๋Š” 10 ์ด์ƒ 30 ์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. (10 โ‰ค C โ‰ค 30) + +6. ํ•˜๋‚˜์˜ ๋ฒŒํ†ต์—์„œ ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋Š” ๊ฟ€์˜ ์–‘์€ 1 ์ด์ƒ 9 ์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. + +7. ํ•˜๋‚˜์˜ ๋ฒŒํ†ต์—์„œ ์ผ๋ถ€๋ถ„์˜ ๊ฟ€๋งŒ ์ฑ„์ทจํ•  ์ˆ˜ ์—†๊ณ , ๋ฒŒํ†ต์— ์žˆ๋Š” ๋ชจ๋“  ๊ฟ€์„ ํ•œ๋ฒˆ์— ์ฑ„์ทจํ•ด์•ผ ํ•œ๋‹ค. + +# ์ž…๋ ฅ + +์ž…๋ ฅ์˜ ๋งจ ์ฒซ ์ค„์—๋Š” ์ด ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง€๊ณ , ๊ทธ ๋‹ค์Œ ์ค„๋ถ€ํ„ฐ T๊ฐœ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ์ฒซ ๋ฒˆ์งธ ์ค„์—๋Š” ๋ฒŒํ†ต๋“ค์˜ ํฌ๊ธฐ N, ์„ ํƒํ•  ์ˆ˜ ์žˆ๋Š” ๋ฒŒํ†ต์˜ ๊ฐœ์ˆ˜ M, ๊ฟ€์„ ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋Š” ์ตœ๋Œ€ ์–‘ C๊ฐ€ ์ฐจ๋ก€๋กœ ์ฃผ์–ด์ง„๋‹ค. + +๊ทธ ๋‹ค์Œ ์ค„๋ถ€ํ„ฐ N*N ๊ฐœ์˜ ๋ฒŒํ†ต์—์„œ ์ฑ„์ทจํ•  ์ˆ˜ ์žˆ๋Š” ๊ฟ€์˜ ์–‘์— ๋Œ€ํ•œ ์ •๋ณด๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜๋งŒํผ T์ค„์— T๊ฐœ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค ๊ฐ๊ฐ์— ๋Œ€ํ•œ ๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. + +๊ฐ ์ค„์€ "#x"๋กœ ์‹œ์ž‘ํ•˜๊ณ  ๊ณต๋ฐฑ์„ ํ•˜๋‚˜ ๋‘” ๋‹ค์Œ ์ •๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. (x๋Š” 1๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๋ฒˆํ˜ธ์ด๋‹ค) + +์ถœ๋ ฅํ•ด์•ผ ํ•  ์ •๋‹ต์€ ๋‘ ์ผ๊พผ์ด ๊ฟ€์„ ์ฑ„์ทจํ•˜์—ฌ ์–ป์„ ์ˆ˜ ์žˆ๋Š” ์ตœ๋Œ€ ์ˆ˜์ต์ด๋‹ค. + +# ํ’€์ด + +์ผ์ข…์˜ ์™„์ „ํƒ์ƒ‰์„ ๊ธฐ๋ฐ˜์œผ๋กœ ๊ฐ€์ง€์น˜๊ธฐ๋ฅผ ํ•ด์•ผ ํ•œ๋‹ค๊ณ  ์ƒ๊ฐํ–ˆ๋‹ค. + +๋‘ ์‚ฌ๋žŒ์ด ๋ฒŒํ†ต์„ ์„ ํƒํ•  ์ˆ˜ ์žˆ๋Š” ๋ฐฉ๋ฒ•์„ ๋ชจ๋‘ ํƒ์ƒ‰ํ•˜๋Š” ๊ฒƒ์„ ๊ธฐ๋ฐ˜์œผ๋กœ ์ง„ํ–‰ํ•œ๋‹ค. ํƒ์ƒ‰ํ•˜๋ฉด์„œ ๊ณ ๋ฅธ ๋ฒŒํ†ต์— ๋Œ€ํ•ด ๋ถ€๋ถ„์ง‘ํ•ฉ์„ ๋งŒ๋“ค์–ด ์ œ๊ณฑ์˜ ํ•ฉ์ด ์ตœ๋Œ€๊ฐ€ ๋˜๋Š” ๊ฒฝ์šฐ๋ฅผ ์ฐพ๋Š”๋‹ค. 
์ตœ๋Œ€๊ฐ€ ๋˜๋Š” ์ œ๊ณฑ์˜ ํ•ฉ์ด ๋ฐ”๋กœ ๊ณ ๋ฅธ ๋ฒŒํ†ต๋“ค์—์„œ ๋‚˜์˜ฌ ์ˆ˜ ์žˆ๋Š” ์ตœ๋Œ€ ์ˆ˜์ต์ด ๋œ๋‹ค. + +์ด ๋•Œ ๊ฟ€์˜ ์–‘์ด `c`๋ฅผ ๋„˜๋Š” ๊ฒฝ์šฐ๋Š” ๋” ์ด์ƒ ๋ถ€๋ถ„ ์ง‘ํ•ฉ์„ ๋งŒ๋“ค ํ•„์š”๊ฐ€ ์—†๊ธฐ ๋•Œ๋ฌธ์— ์—ฐ์‚ฐํ•˜์ง€ ์•Š๋Š”๋‹ค. + +## ์žฌ๊ท€๋ฅผ ํ†ตํ•œ ํ’€์ด + +์ฒซ ๋ฒˆ์งธ๋กœ ์ƒ๊ฐํ•œ ๋ฐฉ๋ฒ•์€ ์žฌ๊ท€๋กœ ๋ถ€๋ถ„ ์ง‘ํ•ฉ์„ ๊ทธ๋Œ€๋กœ ๊ตฌํ•ด์„œ ๊ณ„์‚ฐํ•˜๋Š” ๊ฒƒ์ด๋‹ค. ๋‹จ, ์•ž์„œ ๋งํ•œ ๊ฐ€์ง€์น˜๊ธฐ๋ฅผ ํ•˜๊ธฐ ์œ„ํ•ด `c`๋ฅผ ๋„˜๋Š” ๊ฒฝ์šฐ๋Š” ๋„˜์–ด๊ฐ€๋„๋ก ์ž‘์„ฑํ•˜์˜€๋‹ค. + +### ์ฝ”๋“œ + +```py +test_case = int(input()) + +def make_subset(arr, depth, honey_num = 0, selected = []): + if depth == -1: + subsets.append(selected) + return + for i in range(2): + if i == 0: + make_subset(arr, depth - 1, honey_num, selected) + else: + if honey_num + arr[depth] > c: + continue + make_subset(arr, depth - 1, honey_num + arr[depth], selected + [arr[depth]]) + +def calcul_max_cost(subsets): + max_cost = 0 + for subset in subsets: + cost = 0 + for ele in subset: + cost += ele ** 2 + + max_cost = max(max_cost, cost) + + return max_cost + +for t in range(test_case): + n, m, c = map(int, input().split()) + honey_map = [list(map(int, input().split())) for _ in range(n)] + total_max = 0 + + for fst_i in range(n): + for fst_j in range(n - m + 1): + + subsets = [] + make_subset(honey_map[fst_i][fst_j:fst_j + m], m - 1) + fst_max = calcul_max_cost(subsets) + + for snd_i in range(n): + start = 0 + if snd_i == fst_i: + start = fst_j + m + for snd_j in range(start, n - m + 1): + subsets = [] + make_subset(honey_map[snd_i][snd_j:snd_j + m], m - 1) + snd_max = calcul_max_cost(subsets) + + total_max = max(total_max, fst_max + snd_max) + + print(f"#{t + 1} {total_max}") +``` + +## DP๋ฅผ ํ†ตํ•œ ํ’€์ด + +์ด ๋ฌธ์ œ๋ฅผ ๋‹ค์‹œ ์ž˜ ์ƒ๊ฐํ•ด๋ณด๋ฉด ์„ ํƒํ•œ ๊ฟ€ํ†ต ๋‚ด์—์„œ ๊ฟ€ํ†ต์„ ๊ณ ๋ฅด๊ฑฐ๋‚˜(1) ์•ˆ๊ณ ๋ฅด๋ฉด์„œ(0) ์ฃผ์–ด์ง„ `c`(์šฉ๋Ÿ‰) ๋‚ด์—์„œ ์ตœ๋Œ€์˜ ์ˆ˜์ต(์ด๋“)์„ ์–ป๋Š” ๊ฒƒ์ด ๋ชฉ์ ์ด๋‹ค. ์ฆ‰, ์ด ๋ฌธ์ œ๋Š” 0-1 ๋ฐฐ๋‚ญ ๋ฌธ์ œ์™€ ์œ ์‚ฌํ•˜๋ฏ€๋กœ DP๋กœ๋„ ํ’€์ดํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ด์ „์— ํ‘ผ [ํ–„๋ฒ„๊ฑฐ ๋‹ค์ด์–ดํŠธ](/posts/swea_5215/)์—์„œ ์‚ฌ์šฉํ•œ DP์˜ ์ง„ํ–‰ ๋ฐฉ์‹๊ณผ ์œ ์‚ฌํ•˜๊ฒŒ ์ˆ˜์ต์„ ์ฐพ๋Š” ํ’€์ด์ด๋‹ค. + +๋‹ค๋ฅธ ์ ์€ ๊ฟ€ ์–‘์˜ ํ•ฉ์ด `c`๋ฅผ ๋„˜์„ ๊ฒฝ์šฐ DP์— ๋ฐ˜์˜ํ•˜์ง€ ์•Š๋Š”๋‹ค๋Š” ๊ฒƒ๊ณผ, ์ œ๊ณฑ์˜ ํ•ฉ์ด ํฐ ๊ฒƒ์„ ๊ธฐ์ค€์œผ๋กœ PD์— ์—…๋ฐ์ดํŠธํ•œ๋‹ค๋Š” ๊ฒƒ์ด๋‹ค. + +๋ถ€๋ถ„ ์ง‘ํ•ฉ์„ ๋งŒ๋“ค์–ด ์ˆ˜์ต์„ ๊ณ„์‚ฐํ•˜๋Š” ๋ถ€๋ถ„์ด DP๋กœ ๋ฐ”๋€ ๊ฒƒ๋งŒ ์ œ์™ธํ•˜๋ฉด ์žฌ๊ท€๋ฅผ ํ†ตํ•œ ํ’€์ด์™€ ์™„์ „ํžˆ ๋™์ผํ•˜๋‹ค. + +```py +def max_subset_sum(arr): + dp = [[0, 0] for _ in range(c + 1)] + + for num in arr: + for j in range(c, num - 1, -1): + if dp[j - num][0] + num > c: + continue + next_sq_value = dp[j - num][1] + num ** 2 + if next_sq_value > dp[j][1]: + dp[j][0] = dp[j - num][0] + num + dp[j][1] = next_sq_value + _, max_sum = max(dp, key=lambda x: x[1]) + return max_sum + +test_case = int(input()) + +for t in range(test_case): + n, m, c = map(int, input().split()) + honey_map = [list(map(int, input().split())) for _ in range(n)] + total_max = 0 + + for fst_i in range(n): + for fst_j in range(n - m + 1): + + fst_max = max_subset_sum(honey_map[fst_i][fst_j:fst_j + m]) + + for snd_i in range(n): + start = 0 + if snd_i == fst_i: + start = fst_j + m + for snd_j in range(start, n - m + 1): + snd_max = max_subset_sum(honey_map[snd_i][snd_j:snd_j + m]) + + total_max = max(total_max, fst_max + snd_max) + + print(f"#{t + 1} {total_max}") +``` + +๊ทผ๋ฐ ์ƒ๊ฐ๋ณด๋‹ค ์‹คํ–‰ ์‹œ๊ฐ„์ด **Dynamic**ํ•˜๊ฒŒ ์ค„์–ด๋“ค์ง€ ์•Š์•„ ์‚ด์ง ๊น€์ƒœ๋‹ค. 
+ +![](/posting_imgs/swea2115.png) \ No newline at end of file diff --git a/_posts/2024-08-14-swea_2112.markdown b/_posts/2024-08-14-swea_2112.markdown new file mode 100644 index 00000000000..f16f7722269 --- /dev/null +++ b/_posts/2024-08-14-swea_2112.markdown @@ -0,0 +1,177 @@ +--- +title: "[SWEA]๋ณดํ˜ธ ํ•„๋ฆ„ - 2112 (๋ชจ์˜ ์—ญ๋Ÿ‰ ํ…Œ์ŠคํŠธ)" +author: kwon +date: 2024-08-14T14:00:00 +0900 +categories: [swea, ๋ชจ์˜์—ญ๋Ÿ‰] +tags: [dfs, backtracking] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 15 ์ดˆ | ํž™ ์ •์  ๋ฉ”๋ชจ๋ฆฌ: 256 MB / ์Šคํƒ ๋ฉ”๋ชจ๋ฆฌ 1MB | + +# ๋ฌธ์ œ + +์„ฑ๋Šฅ์ด ์šฐ์ˆ˜ํ•œ ๋ณดํ˜ธ ํ•„๋ฆ„์„ ์ œ์ž‘ํ•˜๋ ค๊ณ  ํ•œ๋‹ค. + +๋ณดํ˜ธ ํ•„๋ฆ„์€ ์—ท์€ ํˆฌ๋ช…ํ•œ ๋ง‰์„ D์žฅ ์Œ“์•„์„œ ์ œ์ž‘๋œ๋‹ค. + +๋ง‰์€ ๋™์ผํ•œ ํฌ๊ธฐ๋ฅผ ๊ฐ€์ง„ ๋ฐ”(bar) ๋ชจ์–‘์˜ ์…€๋“ค์ด ๊ฐ€๋กœ ๋ฐฉํ–ฅ์œผ๋กœ W๊ฐœ ๋ถ™์—ฌ์„œ ๋งŒ๋“ค์–ด์ง„๋‹ค. + +์ด๋ ‡๊ฒŒ ์ œ์ž‘๋œ ํ•„๋ฆ„์€ ๋‘๊ป˜ D, ๊ฐ€๋กœ ํฌ๊ธฐ W์˜ ๋ณดํ˜ธ ํ•„๋ฆ„์ด๋ผ๊ณ  ํ•œ๋‹ค. + + + +๊ฐ ์…€๋“ค์€ ํŠน์„ฑ A ๋˜๋Š” ํŠน์„ฑ B๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ๋‹ค. ๋ณดํ˜ธ ํ•„๋ฆ„์˜ ์„ฑ๋Šฅ์€ ์…€๋“ค์˜ ํŠน์„ฑ์ด ์–ด๋–ป๊ฒŒ ๋ฐฐ์น˜๋จ์— ๋”ฐ๋ผ ๊ฒฐ์ •๋œ๋‹ค. + + +๋ณดํ˜ธ ํ•„๋ฆ„์˜ ์„ฑ๋Šฅ์„ ๊ฒ€์‚ฌํ•˜๊ธฐ ์œ„ํ•ด ํ•ฉ๊ฒฉ๊ธฐ์ค€ K๋ผ๋Š” ๊ฐ’์„ ์‚ฌ์šฉํ•œ๋‹ค. + +์ถฉ๊ฒฉ์€ ๋ณดํ˜ธ ํ•„๋ฆ„ ๋‹จ๋ฉด์˜ ์„ธ๋กœ ๋ฐฉํ–ฅ์œผ๋กœ ๊ฐ€ํ•ด์ง€๋ฏ€๋กœ, ์„ธ๋กœ ๋ฐฉํ–ฅ ์…€๋“ค์˜ ํŠน์„ฑ์ด ์ค‘์š”ํ•˜๋‹ค. + +๋‹จ๋ฉด์˜ ๋ชจ๋“  ์„ธ๋กœ๋ฐฉํ–ฅ์— ๋Œ€ํ•ด์„œ ๋™์ผํ•œ ํŠน์„ฑ์˜ ์…€๋“ค์ด K๊ฐœ ์ด์ƒ ์—ฐ์†์ ์œผ๋กœ ์žˆ๋Š” ๊ฒฝ์šฐ์—๋งŒ ์„ฑ๋Šฅ๊ฒ€์‚ฌ๋ฅผ ํ†ต๊ณผํ•˜๊ฒŒ ๋œ๋‹ค. + + +์„ฑ๋Šฅ๊ฒ€์‚ฌ์— ํ†ต๊ณผํ•˜๊ธฐ ์œ„ํ•ด์„œ ์•ฝํ’ˆ์„ ์‚ฌ์šฉํ•˜์—ฌ์•ผ ํ•œ๋‹ค. + +์•ฝํ’ˆ์€ ๋ง‰ ๋ณ„๋กœ ํˆฌ์ž…ํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ ์ด ๊ฒฝ์šฐ ํˆฌ์ž…ํ•˜๋Š” ๋ง‰์˜ ๋ชจ๋“  ์…€๋“ค์€ ํ•˜๋‚˜์˜ ํŠน์„ฑ์œผ๋กœ ๋ณ€๊ฒฝ๋œ๋‹ค. + +ํŠน์ • ๋ง‰์— ์•ฝํ’ˆ A๋ฅผ ํˆฌ์ž…ํ•˜๋ฉด ๋ง‰ ๋‚ด์˜ ๋ชจ๋“  ์…€๋“ค์ด ํŠน์„ฑ A๋กœ ๋ณ€๊ฒฝ๋˜๋ฉฐ, ์•ฝํ’ˆ B๋ฅผ ๋„ฃ๊ฒŒ ๋˜๋ฉด ํŠน์„ฑ์ด ๋ชจ๋‘ ํŠน์„ฑ B๋กœ ๋ณ€๊ฒฝ๋œ๋‹ค. + + +๋‘๊ป˜ D, ๊ฐ€๋กœํฌ๊ธฐ W์ธ ๋ณดํ˜ธ ํ•„๋ฆ„ ๋‹จ๋ฉด์˜ ์ •๋ณด์™€ ํ•ฉ๊ฒฉ๊ธฐ์ค€ K๊ฐ€ ์ฃผ์–ด์กŒ์„ ๋•Œ, ์•ฝํ’ˆ ํˆฌ์ž… ํšŸ์ˆ˜๋ฅผ ์ตœ์†Œ๋กœ ํ•˜์—ฌ ์„ฑ๋Šฅ๊ฒ€์‚ฌ๋ฅผ ํ†ต๊ณผํ•  ์ˆ˜ ์žˆ๋Š” ๋ฐฉ๋ฒ•์„ ์ฐพ๊ณ , + +์ด๋•Œ์˜ ์•ฝํ’ˆ ํˆฌ์ž… ํšŸ์ˆ˜๋ฅผ ์ถœ๋ ฅํ•˜๋ผ. + +์•ฝํ’ˆ์„ ํˆฌ์ž…ํ•˜์ง€ ์•Š๊ณ ๋„ ์„ฑ๋Šฅ๊ฒ€์‚ฌ๋ฅผ ํ†ต๊ณผํ•˜๋Š” ๊ฒฝ์šฐ์—๋Š” 0์„ ์ถœ๋ ฅํ•œ๋‹ค. + + +## ์ œ์•ฝ์‚ฌํ•ญ + +1. ์‹œ๊ฐ„์ œํ•œ : ์ตœ๋Œ€ 50๊ฐœ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๋ฅผ ๋ชจ๋‘ ํ†ต๊ณผํ•˜๋Š”๋ฐ, C/C++/Java ๋ชจ๋‘ 5์ดˆ + +2. ๋ณดํ˜ธ ํ•„๋ฆ„์˜ ๋‘๊ป˜ D๋Š” 3์ด์ƒ 13์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. (3โ‰คDโ‰ค13) + +3. ๋ณดํ˜ธ ํ•„๋ฆ„์˜ ๊ฐ€๋กœํฌ๊ธฐ W๋Š” 1์ด์ƒ 20์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. (1โ‰คWโ‰ค20) + +4. ํ•ฉ๊ฒฉ๊ธฐ์ค€ K๋Š” 1์ด์ƒ D์ดํ•˜์˜ ์ •์ˆ˜์ด๋‹ค. (1โ‰คKโ‰คD) + +5. ์…€์ด ๊ฐ€์งˆ ์ˆ˜ ์žˆ๋Š” ํŠน์„ฑ์€ A, B ๋‘ ๊ฐœ๋งŒ ์กด์žฌํ•œ๋‹ค. + + +# ์ž…๋ ฅ + +์ฒซ ์ค„์— ์ด ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜ T๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. + +๋‘ ๋ฒˆ์งธ ์ค„๋ถ€ํ„ฐ T๊ฐœ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค๊ฐ€ ์ฐจ๋ก€๋Œ€๋กœ ์ฃผ์–ด์ง„๋‹ค. + +๊ฐ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ์ฒซ ์ค„์—๋Š” ๋ณดํ˜ธ ํ•„๋ฆ„์˜ ๋‘๊ป˜ D, ๊ฐ€๋กœํฌ๊ธฐ W, ํ•ฉ๊ฒฉ๊ธฐ์ค€ K๊ฐ€ ์ฐจ๋ก€๋กœ ์ฃผ์–ด์ง„๋‹ค. + +๊ทธ ๋‹ค์Œ D์ค„์— ๋ณดํ˜ธ ํ•„๋ฆ„ ๋‹จ๋ฉด์˜ ์ •๋ณด๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. ๊ฐ ์ค„์—๋Š” ์…€๋“ค์˜ ํŠน์„ฑ W๊ฐœ๊ฐ€ ์ฃผ์–ด์ง„๋‹ค. (ํŠน์„ฑA๋Š” 0, ํŠน์„ฑB๋Š” 1๋กœ ํ‘œ์‹œ๋œ๋‹ค.) + + +# ์ถœ๋ ฅ + +ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๊ฐœ์ˆ˜๋งŒํผ T์ค„์— T๊ฐœ์˜ ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค ๊ฐ๊ฐ์— ๋Œ€ํ•œ ๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. + +๊ฐ ์ค„์€ โ€œ#xโ€๋กœ ์‹œ์ž‘ํ•˜๊ณ  ๊ณต๋ฐฑ์„ ํ•˜๋‚˜ ๋‘” ๋‹ค์Œ ์ •๋‹ต์„ ์ถœ๋ ฅํ•œ๋‹ค. (x๋Š” 1๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜๋Š” ํ…Œ์ŠคํŠธ ์ผ€์ด์Šค์˜ ๋ฒˆํ˜ธ์ด๋‹ค) + +์ถœ๋ ฅํ•ด์•ผ ํ•  ์ •๋‹ต์€ ์„ฑ๋Šฅ๊ฒ€์‚ฌ๋ฅผ ํ†ต๊ณผํ•  ์ˆ˜ ์žˆ๋Š” ์•ฝํ’ˆ์˜ ์ตœ์†Œ ํˆฌ์ž… ํšŸ์ˆ˜์ด๋‹ค. 
์•ฝํ’ˆ์„ ํˆฌ์ž…ํ•˜์ง€ ์•Š๊ณ ๋„ ์„ฑ๋Šฅ๊ฒ€์‚ฌ๋ฅผ ํ†ต๊ณผํ•˜๋Š” ๊ฒฝ์šฐ์—๋Š” 0์„ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํ’€์ด + +DFS๋ฅผ ํ†ตํ•ด ๊ฐ ์ธต ๋งˆ๋‹ค ์•ฝํ’ˆ A ํ˜น์€ B๋ฅผ ํˆฌ์ž…ํ•˜๊ฑฐ๋‚˜, ํˆฌ์ž…ํ•˜์ง€ ์•Š๋Š” ๊ฒฝ์šฐ๋ฅผ ํƒ์ƒ‰ํ–ˆ๋‹ค. + +๊ทธ๋ฆฌ๊ณ  ๊ฐ ํƒ์ƒ‰๋งˆ๋‹ค ํ…Œ์ŠคํŠธ๋ฅผ ํ†ต๊ณผํ•  ์ˆ˜ ์žˆ๋Š”์ง€ ํ™•์ธํ•˜์—ฌ, ํ†ต๊ณผํ•  ์ˆ˜ ์žˆ๋‹ค๋ฉด ์ตœ์†Œ ์ฃผ์ž… ํšŸ์ˆ˜๋ฅผ ๊ฐฑ์‹ ํ•˜๊ณ  ํƒ์ƒ‰์„ ์ค‘๋‹จํ•œ๋‹ค. + +## `test_film()` + +๋ณดํ˜ธ ํ•„๋ฆ„ ํ…Œ์ŠคํŠธ๋Š” ์„ธ๋กœ๋กœ ์—ฐ์†ํ•˜๋Š” ํŠน์„ฑ์„ ์ฐพ์•„ํ– ํ•œ๋‹ค. + +```py +def chk_test(): + chk_a_list = [0] * k + chk_b_list = [1] * k + + for w_i in range(w): + is_success = False + + for d_i in range(d - k + 1): + cur_chk = [film[tmp_i][w_i] for tmp_i in range(d_i, d_i + k)] + if cur_chk == chk_a_list or cur_chk == chk_b_list: + is_success = True + break + if not is_success: + return False + return True +``` + +๊ทธ๋ž˜์„œ ์œ„์™€ ๊ฐ™์ด `k`๋งŒํผ ์„ธ๋กœ๋กœ ๋ฐฐ์—ด์„ ๋งŒ๋“ค์–ด ๊ฐ™์€ ๊ฐ’์œผ๋กœ ์—ฐ์†ํ•˜๋Š” ์ง€ ํ™•์ธํ•œ๋‹ค. (`[film[tmp_i][w_i] for tmp_i in range(d_i, d_i + k)]`) + +## ์ฝ”๋“œ + +```py +test_case = int(input()) + +def chk_test(): + chk_a_list = [0] * k + chk_b_list = [1] * k + + for w_i in range(w): + is_success = False + + for d_i in range(d - k + 1): + cur_chk = [film[tmp_i][w_i] for tmp_i in range(d_i, d_i + k)] + if cur_chk == chk_a_list or cur_chk == chk_b_list: + is_success = True + break + if not is_success: + return False + return True + + +def test_film(film, depth=0, cnt_inject=0, chk_list=[]): + global min_inject + + if cnt_inject >= min_inject: + return + + if chk_test(): + min_inject = min(min_inject, cnt_inject) + return + + if depth >= d: + return + + origin_membrane = film[depth][:] + + # ํ˜„์žฌ ์ธต์„ ๊ทธ๋Œ€๋กœ + test_film(film, depth + 1, cnt_inject) + + # ํ˜„์žฌ ์ธต์„ a๋กœ + film[depth] = inject_a + test_film(film, depth + 1, cnt_inject + 1) + film[depth] = origin_membrane + + # ํ˜„์žฌ ์ธต์„ b๋กœ + film[depth] = inject_b + test_film(film, depth + 1, cnt_inject + 1) + film[depth] = origin_membrane + +for t in range(test_case): + d, w, k = map(int, input().split()) + + film = [list(map(int, input().split())) for _ in range(d)] + + inject_a = [0] * w + inject_b = [1] * w + + min_inject = float('inf') + + test_film(film) + print(f"#{t + 1} {min_inject}") +``` + +DFS๋ฅผ ์ง„ํ–‰ํ•˜๋ฉด์„œ ์ง€๊ธˆ์˜ ํ•„๋ฆ„(`film`), ๊นŠ์ด(`depth`), ํˆฌ์ž…ํ•œ ํšŸ์ˆ˜(`cnt_inject`)๋ฅผ ๋„˜๊ฒจ์ค€๋‹ค. ์ด ๋•Œ ์•ฝ๋ฌผ์„ ํˆฌ์ž…ํ–ˆ๋‹ค๋ฉด, ์žฌ๊ท€๊ฐ€ ๋๋‚œ ํ›„ ์›๋ž˜ ์ƒํƒœ๋กœ ๋Œ๋ ค์ค€๋‹ค. \ No newline at end of file diff --git a/_posts/2024-09-09-boj_14712.markdown b/_posts/2024-09-09-boj_14712.markdown new file mode 100644 index 00000000000..2d3d514ebd3 --- /dev/null +++ b/_posts/2024-09-09-boj_14712.markdown @@ -0,0 +1,89 @@ +--- +title: "[BOJ]๋„ด๋ชจ๋„ด๋ชจ (Easy) - 14712 (G5)" +author: kwon +date: 2024-09-09T14:00:00 +0900 +categories: [boj, gold] +tags: [brute-force, backtracking] +math: true +mermaid: false +--- + +| ์‹œ๊ฐ„ ์ œํ•œ | ๋ฉ”๋ชจ๋ฆฌ ์ œํ•œ | +| --- | --- | +| 1 ์ดˆ | 512 MB | + +# ๋ฌธ์ œ + +๋„ค๋ชจ๋Š” ๋ฟŒร—ร—ร— ๊ฒŒ์ž„์— ๊นŠ์€ ๊ฐ๋ช…์„ ๋ฐ›์•„, ์ง์‚ฌ๊ฐํ˜• ๋ชจ์–‘์˜ ๊ฒฉ์žํŒ๊ณผ "๋„ด๋ชจ"๋ผ๋Š” ์ˆ˜์ˆ˜๊ป˜๋ผ์˜ ์ƒ๋ฌผ์„ ์ด์šฉํ•˜๋Š” "๋„ด๋ชจ"๋ชจโ€๋ผ๋Š” ๊ฒŒ์ž„์„ ๋งŒ๋“ค์—ˆ๋‹ค. ์ด ๊ฒŒ์ž„์˜ ๊ทœ์น™์€ ์•„์ฃผ ๊ฐ„๋‹จํ•˜๋‹ค. ๊ฒฉ์žํŒ์˜ ๋น„์–ด ์žˆ๋Š” ์นธ์„ ์ž„์˜๋กœ ๊ณจ๋ผ "๋„ด๋ชจ"๋ฅผ ํ•˜๋‚˜ ์˜ฌ๋ ค๋†“๊ฑฐ๋‚˜, "๋„ด๋ชจ"๊ฐ€ ์˜ฌ๋ผ๊ฐ„ ์นธ ๋„ค ๊ฐœ๊ฐ€ 2 ร— 2 ์‚ฌ๊ฐํ˜•์„ ์ด๋ฃจ๋Š” ๋ถ€๋ถ„์„ ์ฐพ์•„ ๊ทธ ์œ„์— ์žˆ๋Š” "๋„ด๋ชจ"๋“ค์„ ๋ชจ๋‘ ์—†์• ๋Š” ๊ฒƒ์„ ์งˆ๋ฆด ๋•Œ๊นŒ์ง€ ๋ฐ˜๋ณตํ•˜๋ฉด ๋œ๋‹ค. 
+ +ํ•˜์ง€๋งŒ ์•ˆํƒ€๊น๊ฒŒ๋„ ๊ฒŒ์ž„์€ ์ •๋ง ์žฌ๋ฏธ๊ฐ€ ์—†์—ˆ๊ณ , ๋„ค๋ชจ๋Š” ์•„์ฃผ ๋นจ๋ฆฌ ์งˆ๋ ค ๋ฒ„๋ฆฌ๊ณ  ๋ง์•˜๋‹ค. ์‹ค๋งํ•œ ๋„ค๋ชจ๋Š” ๊ฒŒ์ž„์„ ์ ๋‹นํžˆ ํ”Œ๋ ˆ์ดํ•˜๋‹ค๊ฐ€, "๋„ด๋ชจ"๋ฅผ ์—†์• ๊ณ  ์‹ถ์€๋ฐ ๊ฒฉ์žํŒ ์œ„์— ์—†์•จ ์ˆ˜ ์žˆ๋Š” "๋„ด๋ชจ"๊ฐ€ ์—†์œผ๋ฉด ๊ฒŒ์ž„์„ ๊ทธ๋งŒ๋‘๊ธฐ๋กœ ํ–ˆ๋‹ค. ๋„ค๋ชจ๊ฐ€ ๊ฒŒ์ž„์„ ๊ทธ๋งŒ๋‘์—ˆ์„ ๋•Œ ๋‚˜์˜ฌ ์ˆ˜ ์žˆ๋Š” "๋„ด๋ชจ"์˜ ๋ฐฐ์น˜์˜ ๊ฐ€์ง“์ˆ˜๋ฅผ ๊ตฌํ•˜์—ฌ๋ผ. + +# ์ž…๋ ฅ + +์ฒซ ๋ฒˆ์งธ ์ค„์— ๊ฒฉ์žํŒ์˜ ํ–‰์˜ ๊ฐœ์ˆ˜ N, ์—ด์˜ ๊ฐœ์ˆ˜ M(1 โ‰ค N, M โ‰ค 25, 1 โ‰ค N ร— M โ‰ค 25)์ด ๊ณต๋ฐฑ์œผ๋กœ ๊ตฌ๋ถ„๋˜์–ด ์ฃผ์–ด์ง„๋‹ค. + +# ์ถœ๋ ฅ + +์ฒซ ๋ฒˆ์งธ ์ค„์— ์ฃผ์–ด์ง„ ๊ฒฉ์žํŒ์—์„œ ๋‚˜์˜ฌ ์ˆ˜ ์žˆ๋Š”, "๋„ด๋ชจ"๋“ค์ด ์˜ฌ๋ผ๊ฐ„ ์นธ์ด 2 ร— 2 ์‚ฌ๊ฐํ˜•์„ ์ด๋ฃจ์ง€ ์•Š๋Š” ๋ชจ๋“  ๋ฐฐ์น˜์˜ ๊ฐ€์ง“์ˆ˜๋ฅผ ์ถœ๋ ฅํ•œ๋‹ค. + +# ํžŒํŠธ + +2ร—2 ๊ฒฉ์žํŒ์— 2ร—2 ์‚ฌ๊ฐํ˜•์„ ์ด๋ฃจ์ง€ ์•Š๋„๋ก "๋„ด๋ชจ"๋“ค์„ ๋ฐฐ์น˜ํ•˜๋Š” ๋ฐฉ๋ฒ•์€ ๋ชจ๋“  ๊ฒฝ์šฐ(24 = 16) ์ค‘ ๋„ค ์นธ ๋ชจ๋‘์— "๋„ด๋ชจ"๊ฐ€ ์˜ฌ๋ผ๊ฐ€ ์žˆ๋Š” ๊ฒฝ์šฐ๋ฅผ ์ œ์™ธํ•œ 15๊ฐ€์ง€๊ฐ€ ์žˆ๋‹ค. + +# ํ’€์ด + +DFS ๊ธฐ๋ฐ˜์˜ ๋ฐฑํŠธ๋ž˜ํ‚น์œผ๋กœ ํ’€์—ˆ๋‹ค. +๊ธฐ๋ณธ์ ์ธ DFS ์ˆ˜ํ–‰์€ x์ถ• ๋ฐฉํ–ฅ์œผ๋กœ ์ง„ํ–‰ํ•˜๋ฉด์„œ ๋„ด๋ชจ๋ฅผ ๋†“๊ฑฐ๋‚˜ ๋†“์ง€ ์•Š๋Š” ๋ฐฉ์‹์œผ๋กœ ์ง„ํ–‰ํ•œ๋‹ค. + +ํ•˜์ง€๋งŒ ๊ตณ์ด ์‚ฌ๊ฐํ˜•์ด ์™„์„ฑ๋˜๋Š” ๊ฒฝ์šฐ๊นŒ์ง€ ๋„ด๋ชจ๋ฅผ ๋†“์„ ํ•„์š”๋Š” ์—†์œผ๋ฏ€๋กœ ๊ทธ๋Ÿฐ ๊ฒฝ์šฐ๋Š” ์ œ์™ธํ•˜๊ณ  ํƒ์ƒ‰ํ•œ๋‹ค. + +์‚ฌ๊ฐํ˜•์ด ์™„์„ฑ๋˜๋Š” ๊ฒƒ์€ ํ˜„์žฌ ์œ„์น˜์—์„œ **์ขŒ์ƒ, ์ขŒ, ์šฐ**์— ์žˆ๋Š” ์นธ์„ ํ™•์ธํ•˜๋ฉด ์•Œ ์ˆ˜ ์žˆ๋‹ค. +๋งŒ์•ฝ ๋‹ค์Œ ๊ทธ๋ฆผ๊ณผ ๊ฐ™์ด ์šฐํ•˜, ์šฐ, ํ•˜์— ์žˆ๋Š” ์นธ์„ ํ™•์ธํ•˜๋ฉด ๋‹ค์Œ์— ๋†“์„ ๋„ด๋ชจ๋ฅผ ๋ฏธ๋ฆฌ ์•Œ ์ˆ˜ ์—†์–ด ์‚ฌ๊ฐํ˜•์ด ์™„์„ฑ๋˜๋Š”์ง€ ํ™•์ธํ•˜๊ธฐ ๊ณค๋ž€ํ•˜๋‹ค. + +![boj_14712-1](/posting_imgs/boj_14712-1.png){: width="40%"} + +๊ทธ๋ž˜์„œ ์•„๋ž˜์™€ ๊ฐ™์ด **์ขŒ์ƒ, ์ขŒ, ์šฐ**๋ฅผ ํ™•์ธํ•˜๋ฉด์„œ x์ถ• ๋ฐฉํ–ฅ์œผ๋กœ ๋„ด๋ชจ๋ฅผ ๋†“๊ฑฐ๋‚˜ ๋†“์ง€ ์•Š๊ฒŒ ํƒ์ƒ‰์„ ์ง„ํ–‰ํ•œ๋‹ค. + +![boj_14712-2](/posting_imgs/boj_14712-2.png){: width="40%"} + +## ์‚ฌ๋ฐฉ ํƒ์ƒ‰์ด ํž˜๋“  ์ด์œ  + +๊ทธ๋ƒฅ ์šฐ๋ฆฌ ๋งŽ์ด ํ•˜๋Š” ๊ฒƒ์ฒ˜๋Ÿผ ์‚ฌ๋ฐฉ ํƒ์ƒ‰์œผ๋กœ ํ•ด๋ฒ„๋ฆฌ๋ฉด ์•ˆ๋˜๋ƒ, ํ•  ์ˆ˜ ์žˆ๋‹ค. +ํ•˜์ง€๋งŒ x์ถ• ๋ฐฉํ–ฅ์œผ๋กœ ์ฐจ๋ก€๋Œ€๋กœ ์ง„ํ–‰ํ•˜์ง€ ์•Š์œผ๋ฉด, ์œ„์—์„œ ์–ธ๊ธ‰ํ•œ ์šฐํ•˜, ์šฐ, ํ•˜๋กœ ์‚ฌ๊ฐํ˜•์„ ํ™•์ธํ•  ๋•Œ์™€ ๊ฐ™์€ ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. + +์•„์ง ํƒ์ƒ‰ํ•˜์ง€ ๋ชปํ•œ(๋„ด๋ชจ๋ฅผ ๋†“๊ฑฐ๋‚˜ ๋†“์ง€ ๋ชปํ•œ) ์นธ์— ๋Œ€ํ•ด ์šฐ๋ฆฌ๊ฐ€ ์‚ฌ๊ฐํ˜•์ธ์ง€ ํŒ๋ณ„ํ•  ์ˆ˜ ์—†๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. ๊ทธ๋ž˜์„œ x์ถ• ๋ฐฉํ–ฅ(๊ธ€ ์“ฐ๋Š” ๋ฐฉํ–ฅ)์œผ๋กœ ํƒ์ƒ‰ํ•˜๋ฉด์„œ ์ด๋ฏธ ํƒ์ƒ‰์ด ๋๋‚œ ์นธ์— ๋Œ€ํ•ด์„œ๋งŒ ์‚ฌ๊ฐํ˜•์„ ํŒ๋ณ„ํ•˜๋„๋ก ๋งŒ๋“ค๊ณ , ๋ชจ๋“  ๊ฒฝ์šฐ์— ๋Œ€ํ•ด ์ œ๋Œ€๋กœ ๋„ด๋ชจ๋ฅผ ๋†“์„ ์ˆ˜ ์žˆ๊ฒŒ ๋œ๋‹ค. + +## ์ฝ”๋“œ + +```python +def fill_nemo(d=0): + global cnt + if N*M == d: + cnt += 1 + return + + y = d // M + 1 + x = d % M + 1 + + # ์‚ฌ๊ฐํ˜•์ด ์™„์„ฑ ์•ˆ๋˜๋Š” ๊ฒฝ์šฐ(๋„ด๋ชจ๋ฅผ ๋†“์„ ์ˆ˜ ์žˆ๋Š” ๊ฒฝ์šฐ) + if matrix[y-1][x] == 0 or matrix[y-1][x-1] == 0 or matrix[y][x-1] == 0: + # ๋‹ค์Œ ์œ„์น˜์— ๋„ค๋ชจ ์ƒ์„ฑ + matrix[y][x] = 1 + fill_nemo(d+1) + matrix[y][x] = 0 + + # ๋‹ค์Œ ์œ„์น˜์— ๋„ค๋ชจ ์ƒ์„ฑ X + fill_nemo(d+1) + + +N, M = map(int, input().split()) + +matrix = [[0]*(M+1) for _ in range(N+1)] + +cnt = 0 +fill_nemo() + +print(cnt) +``` + +**์ขŒ์ƒ, ์ขŒ, ์šฐ**๋ฅผ ํ™•์ธํ•˜๋Š”๋ฐ ํŽธ๋ฆฌํ•˜๊ธฐ ์œ„ํ•ด `[[0]*(M+1) for _ in range(N+1)]`๋กœ ํ•œ ์ค„์˜ padding์„ ์ถ”๊ฐ€ํ–ˆ๋‹ค. ์ด์— ๋งž์ถฐ `x`, `y`๋„ ์›๋ž˜ ๊ฐ’๋ณด๋‹ค +1 ํ•˜์—ฌ ์‚ฌ์šฉํ•˜์˜€๋‹ค. 
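+
+참고로 N×M이 작을 때는 모든 칸의 0/1 배치를 전부 나열하면서 2×2 정사각형이 생기는 경우만 제외하는 완전탐색으로도 같은 답을 얻을 수 있어서, 위 백트래킹 결과를 검증하는 용도로 쓸 수 있다. 아래는 그런 검증용 스케치이며, 함수 이름 `count_brute`는 임의로 붙인 것이다.
+
+```python
+from itertools import product
+
+def count_brute(N, M):
+    """2^(N*M)가지 배치를 모두 확인하는 검증용 완전탐색 (N*M이 작을 때만 사용)"""
+    total = 0
+    for cells in product((0, 1), repeat=N * M):
+        board = [cells[r * M:(r + 1) * M] for r in range(N)]
+        ok = True
+        for r in range(N - 1):
+            for c in range(M - 1):
+                if board[r][c] and board[r][c + 1] and board[r + 1][c] and board[r + 1][c + 1]:
+                    ok = False
+                    break
+            if not ok:
+                break
+        total += ok
+    return total
+
+print(count_brute(2, 2))  # 힌트에서 말한 대로 15가 나온다
+```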
\ No newline at end of file diff --git a/_posts/2024-11-06-dockerCH1.markdown b/_posts/2024-11-06-dockerCH1.markdown new file mode 100644 index 00000000000..fa5374e902a --- /dev/null +++ b/_posts/2024-11-06-dockerCH1.markdown @@ -0,0 +1,105 @@ +--- +title: "CH1. ๋„์ปค๋ž€ ๋ฌด์—‡์ธ๊ฐ€? [๊ทธ๋ฆผ์œผ๋กœ ๋ฐฐ์šฐ๋Š” ๋„์ปค & ์ฟ ๋ฒ„๋„คํ‹ฐ์Šค]" +author: kwon +date: 2024-11-06T14:00:00 +0900 +categories: [๋„์ปค] +tags: [docker] +math: true +mermaid: false +--- + +# SECTION 1. ๋„์ปค๋ž€ ๋ฌด์—‡์ธ๊ฐ€? +--- +## 1. ๋„์ปค์˜ ๊ธฐ๋Šฅ + +> ๋„์ปค๋Š” '๋ฐ์ดํ„ฐ ๋˜๋Š” ํ”„๋กœ๊ทธ๋žจ์„ ๊ฒฉ๋ฆฌ์‹œํ‚ค๋Š” ๊ธฐ๋Šฅ'์„ ์ œ๊ณตํ•˜๋Š” ์†Œํ”„ํŠธ์›จ์–ด๋‹ค. + +์ฃผ๋กœ ์„œ๋ฒ„์— ์‚ฌ์šฉ๋˜๋ฉฐ, ๋‹ค์–‘ํ•œ ํ”„๋กœ๊ทธ๋žจ๊ณผ ๋ฐ์ดํ„ฐ๋ฅผ ๊ฐ๊ฐ ๋…๋ฆฝ๋œ ํ™˜๊ฒฝ์— ๊ฒฉ๋ฆฌํ•˜๋Š” ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•œ๋‹ค. ์šด์˜์ฒด์ œ(๋น„์Šทํ•œ ๊ฒƒ) ํ†ต์งธ๋กœ ๊ฒฉ๋ฆฌํ•˜๋Š” ๊ธฐ๋Šฅ์ด๋‹ค. + + +## 2. ์ปจํ…Œ์ด๋„ˆ์™€ ๋„์ปค ์—”์ง„ + +### 2-1. ์ปจํ…Œ์ด๋„ˆ + +> ์ปดํ“จํ„ฐ(์„œ๋ฒ„) ์ƒ์˜ ํ™˜๊ฒฝ์„ ์ž‘๊ฒŒ ๋ถ„ํ• ํ•œ ๊ณต๊ฐ„ + +์ด๋ ‡๊ฒŒ ๋‚˜๋ˆ„์–ด์ง„ ์ปจํ…Œ์ด๋„ˆ์— ๋ฐ์ดํ„ฐ๋‚˜ ํ”„๋กœ๊ทธ๋žจ์„ ๋‘์–ด ๊ฒฉ๋ฆฌํ•œ๋‹ค. ์ด๋ฅผ ๊ฒฉ๋ฆฌํ•˜๋Š” ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•˜๋Š” ์†Œํ”„ํŠธ์›จ์–ด๊ฐ€ ๋„์ปค๋‹ค. +๋„์ปค๋Š” ๋„์ปค ์†Œํ”„ํŠธ์›จ์–ด ๋ณธ์ฒด์ธ **๋„์ปค ์—”์ง„**์„ ์„ค์น˜ํ•ด ์‚ฌ์šฉํ•œ๋‹ค. ์ด **๋„์ปค ์—”์ง„**์„ ์‚ฌ์šฉํ•˜์—ฌ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ƒ์„ฑํ•˜๊ณ  ๊ตฌ๋™ํ•  ์ˆ˜ ์žˆ๋‹ค. + +#### ์ด๋ฏธ์ง€ + +์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“œ๋Š” ๋ฐ์—๋Š” ๋„์ปค ์—”์ง„ ์™ธ์—๋„ **์ด๋ฏธ์ง€**๊ฐ€ ํ•„์š”ํ•˜๋‹ค + +> ์ด๋ฏธ์ง€๋Š” ์ปจํ…Œ์ด๋„ˆ์˜ ๋นตํ‹€๊ณผ๋„ ๊ฐ™์€ ์—ญํ• ์„ ํ•˜๋Š” ๊ฒƒ + +์ด๋ฏธ์ง€์—๋Š” ๋งŽ์€ ์ข…๋ฅ˜๊ฐ€ ์žˆ๋‹ค. ์•„ํŒŒ์น˜ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“ค๋ ค๋ฉด ์•„ํŒŒ์น˜ ์ด๋ฏธ์ง€๋ฅผ ์‚ฌ์šฉํ•˜๊ณ , MySQL ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“ค๋ ค๋ฉด MySQL ์ด๋ฏธ์ง€๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +์šฉ๋Ÿ‰์ด ํ—ˆ๋ฝํ•˜๋Š” ํ•œ, ํ•˜๋‚˜์˜ ๋„์ปค์—์„œ ์—ฌ๋Ÿฌ ๊ฐœ์˜ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +### 2-2. ๋„์ปค์˜ ์ž‘๋™ +> ๋„์ปค๋Š” ๋ฆฌ๋ˆ…์Šค ์ƒ์—์„œ ๋™์ž‘ํ•œ๋‹ค. + +Windows๋‚˜ MacOS์—์„œ๋„ ๋™์ž‘ํ•˜์ง€๋งŒ ๊ฒฐ๊ตญ ๋‚ด๋ถ€์ ์œผ๋กœ ๋ฆฌ๋ˆ…์Šค๊ฐ€ ๊ฐœ์ž…ํ•œ๋‹ค. ๋˜ํ•œ, ์ปจํ…Œ์ด๋„ˆ์—์„œ ๋™์ž‘์‹œํ‚จ ํ”„๋กœ๊ทธ๋žจ๋„ ๋ฆฌ๋ˆ…์Šค์šฉ ํ”„๋กœ๊ทธ๋žจ์ด๋‹ค. + + +## 3. ๋„์ปค์˜ ํ•„์š”์„ฑ + +### 3-1. ๊ฒฉ๋ฆฌ์˜ ํ•„์š”์„ฑ + +> ํ”„๋กœ๊ทธ๋žจ์€ ๋‹จ๋…์œผ๋กœ ๋™์ž‘ํ•˜์ง€ ์•Š๋Š”๋‹ค. + +๋Œ€๋ถ€๋ถ„์˜ ํ”„๋กœ๊ทธ๋žจ์€ ์–ด๋–ค ์‹คํ–‰ ํ™˜๊ฒฝ์ด๋‚˜ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ, ๋‹ค๋ฅธ ํ”„๋กœ๊ทธ๋žจ์„ ์ด์šฉํ•ด ๋™์ž‘ํ•œ๋‹ค. +์˜ˆ๋ฅผ ๋“ค์–ด PHP๋กœ ์ž‘์„ฑ๋œ ํ”„๋กœ๊ทธ๋žจ์„ ์‹คํ–‰ํ•˜๋Š” ๋ฐ๋Š” PHP ์‹คํ–‰ ํ™˜๊ฒฝ์ด ํ•„์š”ํ•  ๊ฒƒ์ด๊ณ , python์œผ๋กœ ์ž‘์„ฑ๋œ ํ”„๋กœ๊ทธ๋žจ์€ ๋‹ค๋ฅธ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ๋งŽ๋‹ค. + +์†Œํ”„ํŠธ์›จ์–ด๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ์—ฌ๋Ÿฌ ๊ฐœ์˜ ํ”„๋กœ๊ทธ๋žจ์œผ๋กœ ๊ตฌ์„ฑ๋œ ๊ฒฝ์šฐ๊ฐ€ ๋งŽ๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด WordPress๋Š” MySQL ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค๋ฅผ ๋”ฐ๋กœ ๊ฐ–์ถ”์ง€ ์•Š์œผ๋ฉด ์‚ฌ์šฉํ•  ์ˆ˜ ์—†๋‹ค. +๋‹ค๋ฅธ ๊ฒฝ์šฐ๋กœ๋Š” ํ”„๋กœ๊ทธ๋žจ๋“ค์ด ํŠน์ •ํ•œ directory๋ฅผ ๊ณต์œ ํ•˜๊ฑฐ๋‚˜ ๊ฐ™์€ ๊ฒฝ๋กœ์— ์„ฑ์ •์„ ์ €์žฅํ•˜๊ธฐ๋„ ํ•œ๋‹ค. + +์ด ๋•Œ๋ฌธ์— ํ•˜๋‚˜์˜ ํ”„๋กœ๊ทธ๋žจ ์—…๋ฐ์ดํŠธ๊ฐ€ ๋‹ค๋ฅธ ํ”„๋กœ๊ทธ๋žจ๋“ค์— ์˜ํ–ฅ์„ ๋ฏธ์น˜๊ฒŒ ๋˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. +๋˜๋Š”, ์„œ๋ฒ„๋ฅผ ๊ตฌ์ถ•ํ•  ๋•Œ ์ด๋Ÿฐ ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•  ์ˆ˜ ์žˆ๋‹ค. ์„œ๋ฒ„์—์„œ๋Š” ์—ฌ๋Ÿฌ ํ”„๋กœ๊ทธ๋žจ์ด ํ•จ๊ป˜ ๋™์ž‘ํ•˜๊ฒŒ ๋œ๋‹ค. ์ด ๋•Œ ์„ค๊ณ„ ๋‹จ๊ณ„์—์„œ ๋‚˜ํƒ€๋‚˜์ง€ ์•Š์€ ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•˜๊ธฐ๋„ ํ•œ๋‹ค. ์ด ๊ฒฝ์šฐ ๋Œ€๋ถ€๋ถ„ ํ”„๋กœ๊ทธ๋žจ ๊ฐ„์˜ ๊ณต์œ  ๋•Œ๋ฌธ์— ๋ฐœ์ƒํ•œ๋‹ค. + +### 3-2. ํ”„๋กœ๊ทธ๋žจ์˜ ๊ฒฉ๋ฆฌ + +๋„์ปค ์ปจํ…Œ์ด๋„ˆ๋Š” ์™„์ „ํžˆ ๋…๋ฆฝ๋œ ํ™˜๊ฒฝ์ด๋ฏ€๋กœ ์—ฌ๋Ÿฌ ์ปจํ…Œ์ด๋„ˆ์—์„œ ๊ฐ™์€ ํ”„๋กœ๊ทธ๋žจ์„ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ๋‹ค. ์ผ๋ฐ˜์ ์œผ๋กœ ํ•œ ์ปดํ“จํ„ฐ์— ํ•œ ๋ฒŒ์˜ ์†Œํ”„ํŠธ์›จ์–ด๋งŒ์„ ์„ค์น˜ํ•  ์ˆ˜ ์žˆ๋‹ค. 
ํ•˜์ง€๋งŒ ๊ฒฉ๋ฆฌ๋ฅผ ํ†ตํ•ด ๋‹ค์–‘ํ•œ(ํ˜น์€ ๊ฐ™์€) ๋ฒ„์ „์˜ ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ์„ค์น˜ํ•  ์ˆ˜ ์žˆ๋‹ค. + +--- + +# SECTION 2. ์„œ๋ฒ„์™€ ๋„์ปค +--- +## 1. ์„œ๋ฒ„ + +### 1-1. ๋‘ ๊ฐ€์ง€ ์˜๋ฏธ์˜ ์„œ๋ฒ„ + +๊ธฐ๋Šฅ์  ์˜๋ฏธ์˜ ์„œ๋ฒ„๋Š” **์–ด๋–ค ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•˜๋Š” ์˜๋ฏธ**์ด๋‹ค. ์ฆ‰, ์šฐ๋ฒ  ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•˜๋Š” ์„œ๋ฒ„๋Š” '์›น ์„œ๋ฒ„'์ด๊ณ , ๋ฉ”์ผ ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•˜๋Š” ์„œ๋ฒ„๋Š” '๋ฉ”์ผ ์„œ๋ฒ„'๊ฐ€ ๋œ๋‹ค. + +๋ฌผ๋ฆฌ์  ์ปดํ“จํ„ฐ๋กœ์˜ ์„œ๋ฒ„๋Š” ๋ฐ์Šคํฌํ†ฑ ์ปดํ“จํ„ฐ์™€ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ์–ด๋”˜๊ฐ€์— **๋ฌผ๋ฆฌ์ ์œผ๋กœ ์กด์žฌํ•˜๋Š” ์ปดํ“จํ„ฐ**์ด๋‹ค. + +๊ฐœ์ธ์šฉ ์ปดํ“จํ„ฐ๋Š” ๊ฐœ์ธ์ด ์‚ฌ์šฉํ•˜์ง€๋งŒ ์„œ๋ฒ„๋Š” ์—ฌ๋Ÿฌ ์‚ฌ๋žŒ์ž‰ ์›๊ฒฉ์œผ๋กœ ์ ‘๊ทผํ•ด ์‚ฌ์šฉํ•œ๋‹ค. + +### 1-2. ์„œ๋ฒ„์˜ ๊ธฐ๋Šฅ + +> ์„œ๋ฒ„์˜ ๊ธฐ๋Šฅ์€ ์†Œํ”„ํŠธ์›จ์–ด๊ฐ€ ์ œ๊ณตํ•œ๋‹ค. + +์•„ํŒŒ์น˜ ๊ฐ™์€ ์›น ์„œ๋ฒ„ ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ์„ค์น˜ํ•˜๋ฉด ์›น ์„œ๋ฒ„ ๊ธฐ๋Šฅ์„ ๊ฐ–์ถ”๋ฉฐ, Sendmail ๊ฐ™์€ ๋ฉ”์ผ ์„œ๋ฒ„ ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ์„ค์น˜ํ•˜๋ฉด ๋ฉ”์ธ ์„œ๋ฒ„๊ฐ€ ๋œ๋‹ค. +๊ทธ๋ฆฌ๊ณ  ๊ธฐ๋Šฅ์ด ์†Œํ”„ํŠธ์›จ์–ด์—์„œ ๋‚˜์˜จ๋‹ค๋Š” ๋ง์€ "**์—ฌ๋Ÿฌ๊ฐ€์ง€ ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ํ•œ ์ปดํ“จํ„ฐ์— ์„ค์น˜ํ•  ์ˆ˜ ์žˆ๋‹ค**"๋Š” ๋ง์ด๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ์—ฌ๋Ÿฌ ๊ธฐ๋Šฅ์  ์˜๋ฏธ์˜ ์„œ๋ฒ„๊ฐ€ ํ•˜๋‚˜์˜ ๋ฌผ๋ฆฌ์  ์ปดํ“จํ„ฐ์— ํ•จ๊ป˜ ์กด์žฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +### 1-3. ์ž์œ ๋กœ์ด ์˜ฎ๊ธธ ์ˆ˜ ์žˆ๋Š” ์ปจํ…Œ์ด๋„ˆ + +> ๋„์ปค๋ฅผ ์ด์šฉํ•˜๋ฉด ๋ฌผ๋ฆฌ์  ํ™˜๊ฒฝ์˜ ์ฐจ์ด, ์„œ๋ฒ„ ๊ตฌ์„ฑ์˜ ์ฐจ์ด๋ฅผ ๋ฌด์‹œํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ปจํ…Œ์ด๋„ˆ๋Š” ์ž์œ ๋กญ๊ฒŒ ์˜ฎ๊ธธ ์ˆ˜ ์žˆ๋‹ค. ์‹ค์ œ ์ปจํ…Œ์ด๋„ˆ ์ž์ฒด๋ฅผ ์˜ฎ๊ธด๋‹ค๊ธฐ ๋ณด๋‹ค๋Š” ์ปจํ…Œ์ด๋„ˆ์˜ ์ •๋ณด๋ฅผ ๋‚ด๋ณด๋‚ด๊ธฐํ•œ ๋‹ค์Œ, ๋‹ค๋ฅธ ๋„์ปค ์—”์ง„์—์„œ ๋ณต์›ํ•˜๋Š” ํ˜•ํƒœ์ด๋‹ค. + +์ด๋Ÿฐ ํŠน์„ฑ์„ ํ†ตํ•ด ๋˜‘๊ฐ™์€ ์ƒํƒœ๋กœ ํŠœ๋‹ํ•œ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ํŒ€์› ์ „์›์—๊ฒŒ ๋ฐฐํฌํ•ด ๋ชจ๋‘๊ฐ€ ๋Œ์ผํ•œ ๊ฐœ๋ฐœํ™˜๊ฒฝ์„ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. + +## ๋„์ปค VS. ๊ฐ€์ƒํ™” + +### ๊ฐ€์ƒํ™” ๊ธฐ์ˆ  + +๋ฉ”์ธ๋ณด๋“œ, CPU, RAM ๋“ฑ์˜ ๋ฌผ๋ฆฌ์ ์ธ ๋ถ€ํ’ˆ์„ ์†Œํ”„ํŠธ์›จ์–ด๋กœ ๊ตฌํ˜„ํ•˜์—ฌ ๋Œ€์ฒดํ•˜๋Š” ๊ฒƒ. +์‹ค์งˆ์ ์œผ๋กœ ๋ฌผ๋ฆฌ ์„œ๋ฒ„์™€ ๋™๋“ฑํ•œ ๊ฒƒ์ด๋ฏ€๋กœ ์šด์˜์ฒด์ œ๋„ ์•„๋ฌด ๊ฒƒ์ด๋‚˜ ์„ค์น˜ํ•  ์ˆ˜ ์žˆ๊ณ , ๊ทธ ์œ„์— ์–ด๋–ค ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ๊ตฌ๋™ํ•ด๋„ ๋ฌด๋ฐฉํ•˜๋‹ค. + +### ๋„์ปค + +์ปจํ…Œ์ด๋„ˆ์—์„œ ๋ฆฌ๋ˆ…์Šค๊ฐ€ ๋™์ž‘ํ•˜๋Š” ๊ฒƒ์ฒ˜๋Ÿผ ๋ณด์ด์ง€๋งŒ ์‹ค์ œ ๋ฆฌ๋ˆ…์Šค๊ฐ€ ๋™์ž‘ํ•˜๋Š” ๊ฒƒ์€ ์•„๋‹ˆ๋‹ค. ์šด์˜์ฒด์ œ์˜ ๊ธฐ๋Šฅ ์ค‘ ์ผ๋ถ€๋ฅผ ํ˜ธ์ŠคํŠธ ์—ญํ• ์„ ํ•˜๋Š” ๋ฌผ๋ฆฌ ์„œ๋ฒ„์— ๋งก๊ฒจ ๋ถ€๋‹ด์„ ๋œ์–ด ๋‘” ํ˜•ํƒœ์ด๋‹ค. +์ฆ‰, ์ปจํ…Œ์ด๋„ˆ๋Š” ์šด์˜์ฒด์ œ์˜ ์ผ๋ถ€ ๊ธฐ๋Šฅ์„ ํ˜ธ์ŠคํŠธ ์ปดํ“จํ„ฐ์— ์˜์กดํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๋ฌผ๋ฆฌ ์„œ๋ฒ„์—๋„ ๋ฆฌ๋ˆ…์Šค ๊ธฐ๋Šฅ์ด ํ•„์š”ํ•˜๋ฉฐ, ์ปจํ…Œ์ด๋„ˆ์˜ ๋‚ด์šฉ๋„ ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๊ฐ€ ๋  ์ˆ˜ ๋ฐ–์— ์—†๋‹ค. + +![vm vs docker](/posting_imgs/vm_docker.png) \ No newline at end of file diff --git a/_posts/2024-11-07-dockerCH2.markdown b/_posts/2024-11-07-dockerCH2.markdown new file mode 100644 index 00000000000..295e3bdfcbd --- /dev/null +++ b/_posts/2024-11-07-dockerCH2.markdown @@ -0,0 +1,164 @@ +--- +title: "CH2. ๋„์ปค์˜ ๋™์ž‘ ์›๋ฆฌ [๊ทธ๋ฆผ์œผ๋กœ ๋ฐฐ์šฐ๋Š” ๋„์ปค & ์ฟ ๋ฒ„๋„คํ‹ฐ์Šค]" +author: kwon +date: 2024-11-07T14:00:00 +0900 +categories: [๋„์ปค] +tags: [docker] +math: true +mermaid: false +--- + +# SECTION 1. ๋„์ปค์˜ ๋™์ž‘ ์›๋ฆฌ +--- +## 2. ๋„์ปค์˜ ๊ตฌ์กฐ + +![docker structure](/posting_imgs/docker_structure.png){: width="80%"} + +์œ„ ๊ทธ๋ฆผ์€ ๋„์ปค์˜ ๊ฐ„๋‹จํ•œ ๊ตฌ์กฐ๋ฅผ ๋‚˜ํƒ€๋‚ธ ๊ฒƒ์ด๋‹ค. ๋ฌผ๋ฆฌ ์„œ๋ฒ„๊ฐ€ ์žˆ๊ณ , ์—ฌ๊ธฐ์„œ ๋™์ž‘ํ•˜๋Š” ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๊ฐ€ ์žˆ๋‹ค. 
+ +์ผ๋ฐ˜์ ์ธ ์„œ๋ฒ„๋ผ๋ฉด ์ด ์ƒํ™ฉ์—์„œ ์šด์˜์ฒด์ œ ์œ„์— ํ”„๋กœ๊ทธ๋žจ์ด๋‚˜ ๋ฐ์ดํ„ฐ๊ฐ€ ์ง์ ‘ ์˜ฌ๋ผ๊ฐ€๊ฒ ์ง€๋งŒ ๋„์ปค๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ฒฝ์šฐ์—๋Š” **OS ์œ„์— ๋„์ปค ์—”์ง„์ด ๋™์ž‘ํ•˜๊ณ  ๊ทธ ์œ„์—์„œ ์ปจํ…Œ์ด๋„ˆ๊ฐ€ ๋™์ž‘**ํ•œ๋‹ค. + +### 2-1. OS๋Š” ๋ญ˜ ํ•˜๋Š”๊ฐ€? + +> ์†Œํ”„ํŠธ์›จ์–ด๋‚˜ ํ”„๋กœ๊ทธ๋žจ์˜ ๋ช…๋ น์„ ํ•˜๋“œ์›จ์–ด์— ์ „๋‹ฌํ•˜๋Š” ์—ญํ• ์„ ํ•œ๋‹ค. + +ํ•˜๋“œ์›จ์–ด๋Š” ์ž์‹ ์˜ ํŒ๋‹จ์œผ๋กœ ์—ฌ๋Ÿฌ ์˜๋ฏธ ์ค‘ ์ ์ ˆํ•œ ๊ฒƒ์„ ๊ณ ๋ฅด๊ฑฐ๋‚˜ ์ ๋‹นํžˆ ์ผ์„ ํ•  ์ˆ˜ ์—†๊ณ , ์ง€์‹œ๋ฐ›์€ ๋Œ€๋กœ๋งŒ ์ˆ˜ํ–‰ํ•  ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ ๋™์ž‘ ํ•˜๋‚˜ํ•˜๋‚˜๋ฅผ ์ง€์ •ํ•ด ์ฃผ์ง€ ์•Š์œผ๋ฉด ์•ˆ ๋œ๋‹ค. + +OS๋Š” ์ด๋Ÿฐ ์ผ๋“ค์„ ํ•˜๋“œ์›จ์–ด๊ฐ€ ์•Œ์•„๋“ค์„ ์ˆ˜ ์žˆ๊ฒŒ ๋ฒˆ์—ญํ•˜์—ฌ ์ „๋‹ฌํ•˜๊ฒŒ ๋œ๋‹ค. + +### 2-2. ๋„์ปค์˜ ๋ฆฌ๋ˆ…์Šค OS ๋น„์Šทํ•œ ๊ฒƒ + +๋ณธ๋ž˜ OS๋Š” '์ปค๋„'์ด๋ผ๋Š” ๋ถ€๋ถ„๊ณผ '๊ทธ ์ด์™ธ์˜ ์ฃผ๋ณ€ ๋ถ€๋ถ„'์œผ๋กœ ๊ตฌ์„ฑ๋œ๋‹ค. ์ฃผ๋ณ€ ๋ถ€๋ถ„์ด ํ”„๋กœ๊ทธ๋žจ์˜ ์—ฐ๋ฝ ๋‚ด์šฉ์„ ์ปค๋„์— ์ „๋‹ฌํ•˜๊ณ  ์ปค๋„์ด ํ•˜๋“œ์›จ์–ด๋ฅผ ๋‹ค๋ฃฌ๋‹ค. + +์—ฌ๊ธฐ์„œ ๋„์ปค์˜ ์ปจํ…Œ์ด๋„ˆ๋Š” ์šด์˜์ฒด์ œ์˜ ์ฃผ๋ณ€ ๋ถ€๋ถ„์„ ๊ฐ€์ง€๊ณ  ์žˆ๋‹ค. ์ด๋ฅผ ํ†ตํ•ด ํ”„๋กœ๊ทธ๋žจ์˜ ๋ช…๋ น์„ ์ „๋‹ฌ๋ฐ›๊ณ , ์ด๋ฅผ ๋ฐ‘๋ฐ”ํƒ•์ด ๋˜๋Š” ์ปค๋„์— ์ „๋‹ฌํ•˜๋Š” ๊ตฌ์กฐ๋กœ ๋˜์–ด ์žˆ๋‹ค. + +์ด ๋•๋ถ„์— OS ์ „์ฒด๋ฅผ ์ปจํ…Œ์ด๋„ˆ์— ๋„ฃ์ง€ ์•Š์„ ์ˆ˜ ์žˆ์–ด, ๋„์ปค๋Š” ๊ฐ€์žฅ ํฐ ํŠน์ง•์ธ '๊ฐ€๋ฒผ์›€'์„ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค. + +### 2-3. ๋„์ปค๋Š” ๊ธฐ๋ณธ์ ์œผ๋กœ Linux ์šฉ์ด๋‹ค. + +> ๋„์ปค๋Š” ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๊ฐ€ ๋™์ž‘ํ•˜๋Š” ๊ฒƒ์„ ์ „์ œ๋กœ ํ•˜๋Š” ๊ตฌ์กฐ๋กœ ๋˜์–ด ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ์—์„œ๋งŒ ๋™์ž‘ํ•  ์ˆ˜ ์žˆ๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ์ปจํ…Œ์ด๋„ˆ ์•ˆ์— ๋“ค์–ด ์žˆ๋Š” ์ฃผ๋ณ€ ๋ถ€๋ถ„๋„ ์ด์— ๋งž์ถฐ ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ์˜ ์ฃผ๋ณ€ ๋ถ€๋ถ„์ด์–ด์•ผ ํ•œ๋‹ค. + +๋„์ปค์— ๋Œ€ํ•œ ์ด์•ผ๊ธฐ๋Š” ๋ณดํ†ต ์„œ๋ฒ„ ํ™˜๊ฒฝ์„ ์ „์ œ๋กœ ํ•œ ๊ฒƒ์ด ๋งŽ์€๋ฐ, ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๊ฐ€ ์„œ๋ฒ„์—์„œ ์‚ฌ์šฉ๋˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ๋งŽ๊ณ , ๋ฆฌ๋ˆ…์Šค์šฉ SW๋„ ์„œ๋ฒ„์šฉ SW๊ฐ€ ๋งŽ๊ธฐ ๋•Œ๋ฌธ์ด๋‹ค. + +#### ์œˆ๋„์šฐ์™€ macOS์—์„œ ๋„์ปค ๊ตฌ๋™ํ•˜๊ธฐ + +VirtualBox๋‚˜ VMware์™€ ๊ฐ™์€ ๊ฐ€์ƒ ํ™˜๊ฒฝ ์œ„์— ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๋ฅผ ์„ค์น˜ํ•˜๊ณ  ๊ทธ ์œ„์—์„œ ๋„์ปค๋ฅผ ์‹คํ–‰ํ•˜๊ฑฐ๋‚˜$^{1)}$, '์œˆ๋„์šฐ / macOS ์šฉ ๋„์ปค ๋ฐ์Šคํฌํ†ฑ'์ฒ˜๋Ÿผ ๋„์ปค๋ฅผ ์‹คํ–‰ํ•˜๋Š” ๋ฐ ํ•„์š”ํ•œ ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๋ฅผ ํฌํ•จํ•˜๋Š” ํŒจํ‚ค์ง€๋ฅผ ์„ค์น˜$^{2)}$ํ•ด ์‚ฌ์šฉํ•œ๋‹ค. + +์ฆ‰, ๋„์ปค๋ฅผ ์‚ฌ์šฉํ•˜๋ ค๋ฉด **์–ด๋–ค ํ˜•ํƒœ๋กœ๋“  ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๋ฅผ ๊ฐ–์ถฐ์•ผ ํ•œ๋‹ค**. + +--- + +# SECTION 2. ๋„์ปค ํ—ˆ๋ธŒ์™€ ์ด๋ฏธ์ง€, ๊ทธ๋ฆฌ๊ณ  ์ปจํ…Œ์ด๋„ˆ +--- +## 1. ์ด๋ฏธ์ง€ + +> ์ด๋ฏธ์ง€๋Š” ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“œ๋Š” ๋ฐ ์‚ฌ์šฉํ•˜๋Š” ๊ธˆํ˜•๊ณผ ๊ฐ™๋‹ค. + +์ด๋ฏธ์ง€๋Š” ๊ธˆํ˜•๊ณผ ๊ฐ™์€ ์—ญํ• ์„ ํ•˜๋Š” ๊ฒƒ์œผ๋กœ, ํ•˜๋‚˜๋งŒ ์žˆ์œผ๋ฉด ๋˜‘๊ฐ™์€ ๊ฒƒ์„ ์—ฌ๋Ÿฌ ๊ฐœ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. +๋•๋ถ„์— ๋™์ผํ•œ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์—ฌ๋Ÿฌ ๊ฐœ ๋ฐฐ์น˜๋ผ๊ธฐ ํŽธ๋ฆฌํ•˜๋‹ค. + +์ด๋ฏธ์ง€๋กœ๋งŒ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋Š” ๊ฒƒ์ด ์•„๋‹ˆ๋ผ ์ปจํ…Œ์ด๋„ˆ๋กœ๋„ ์ด๋ฏธ์ง€๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. +์—…๋ฐ์ดํŠธ๋ฅผ ํ•œ ์ปจํ…Œ์ด๋„ˆ์˜ ์ด๋ฏธ์ง€๋ฅผ ๋งŒ๋“ค์–ด ํ•ด๋‹น ํ˜•ํƒœ์˜ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋ฐ”๋กœ ๋งŒ๋“ค ์ˆ˜ ์žˆ๊ฒŒ ๋œ๋‹ค. + +์ด๋Ÿฐ ํŠน์„ฑ ๋•๋ถ„์— ๋™์ผํ•œ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์—ฌ๋Ÿฌ ๊ฐœ ๋งŒ๋“ค์ง€ ์•Š์•„๋„ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ด๋™์‹œํ‚ฌ ์ˆ˜ ์žˆ๋‹ค. +์ปจํ…Œ์ด๋„ˆ๋Š” ๋„์ปค ์—”์ง„๋งŒ ์„ค์น˜๋˜์–ด ์žˆ์œผ๋ฉด ๊ตฌ๋™์ด ๊ฐ€๋Šฅํ•˜๋ฏ€๋กœ, ๋‹ค๋ฅธ ์„œ๋ฒ„๋‚˜ ์ปดํ“จํ„ฐ์— ๋„์ปค ์—”์ง„์„ ์„ค์น˜ํ•˜๊ณ  ์ƒˆ๋กœ์šด ๋„์ปค ์—”์ง„์— ์ด๋ฏธ์ง€๋ฅผ ์ด์šฉํ•ด ๋˜‘ํ•ฑ์€ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ๋‹ค. + +### 1-1. ๋„์ปค ํ—ˆ๋ธŒ + +๋„์ปค ํ—ˆ๋ธŒ๋Š” ๊ณต๊ฐœ๋œ ์ปจํ…Œ์ด๋„ˆ ์ด๋ฏธ์ง€๊ฐ€ ๋ชจ์—ฌ ์žˆ๋Š” ๊ณณ์ด๋‹ค. 
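For reference, this is roughly how an image is located on Docker Hub and pulled with the standard CLI (a minimal sketch; `httpd` is just one example of an official image):

```sh
# search Docker Hub for images whose name or description matches "httpd"
docker search httpd
# download the official Apache image from Docker Hub
docker image pull httpd
# confirm the image is now stored locally
docker image ls
```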
+๋„์ปค ํ—ˆ๋ธŒ์—๋Š” ์šด์˜์ฒด์ œ ์ฃผ๋ณ€๋ถ€๋งŒ ๋“ค์–ด ์žˆ๋Š” ์ด๋ฏธ์ง€๋ถ€ํ„ฐ ์—ฌ๋Ÿฌ ์†Œํ”„ํŠธ์›จ์–ด๊ฐ€ ํฌํ•จ๋œ ์ด๋ฏธ์ง€๊นŒ์ง€ ๋‹ค์–‘ํ•œ ๊ฒƒ๋“ค์ด ์กด์žฌํ•œ๋‹ค. + +๋˜ํ•œ, ๋ฆฌ๋ˆ…์Šค์—๋Š” ๋‹ค์–‘ํ•œ ๋ฐฐํฌํŒ์ด ์žˆ๋Š”๋ฐ ์ด๋“ค์˜ ์ด๋ฏธ์ง€๋„ ๋ชจ๋‘ ์ œ๊ณต๋œ๋‹ค. + +#### ์•ˆ์ „ํ•œ ์ปจํ…Œ์ด๋„ˆ ์ด๋ฏธ์ง€๋ฅผ ๊ณ ๋ฅด๋Š” ๋ฐฉ๋ฒ• + +1. ๊ณต์‹ ์ด๋ฏธ์ง€๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค + +๋„์ปค์—์„œ ์ง์ ‘ ๋ฐฐํฌํ•˜๊ฑฐ๋‚˜ ํ•ด๋‹น ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ๊ฐœ๋ฐœ ๋ฐ ๊ด€๋ฆฌํ•˜๋Š” ๊ธฐ์—…์ด๋‚˜ ์กฐ์ง์—์„œ ์ œ๊ณตํ•˜๋Š” ๊ฒƒ์ด ์žˆ๋‹ค. +์ด๋Ÿฐ ๊ณต์‹ ์ด๋ฏธ์ง€๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด ์ด๋ฏธ์ง€ ์„ ํƒ๋„ ์‰ฝ๊ณ  ๋ณด์•ˆ๋„ ์ฑ™๊ธธ ์ˆ˜ ์žˆ๋‹ค. + +๋‹ค๋งŒ ์ปจํ…Œ์ด๋„ˆ๊ฐ€ ํŠน์ • ์šด์˜์ฒด์ œ ๋ฐ ๋ฒ„์ „์œผ๋กœ ํ•œ์ •๋˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ์žˆ์–ด, ๊ผญ ํŠน์ • ์šด์˜์ฒด์ œ ๋ฐ ๋ฒ„์ „์„ ์‚ฌ์šฉํ•ด์•ผ ํ•˜๋Š” ๊ฒฝ์šฐ ์ฃผ์˜ํ•ด์•ผ ํ•œ๋‹ค. + +2. ์ปค์Šคํ…€ ์ด๋ฏธ์ง€ + +ํ•„์š”ํ•œ ์ตœ์†Œ์˜ ์š”์†Œ๊ฐ€ ๋‹ด๊ธด ์ด๋ฏธ์ง€์— ํ•„์š”ํ•œ ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ์ถ”๊ฐ€๋กœ ์„ค์น˜ํ•ด ์ปค์Šคํ…€ ์ด๋ฏธ์ง€๋ฅผ ๋งŒ๋“ ๋‹ค. + +์ปจํ…Œ์ด๋„ˆ๋Š” ๋‹ค์–‘ํ•œ ์กฐํ•ฉ์„ ๊ณ ๋ คํ•ด์•ผ ํ•œ๋‹ค. ๋„์ปค๋ฅผ ์‚ฌ์šฉํ•  ๋•Œ์˜ ์›์น™ ์ค‘ ํ•˜๋‚˜๋กœ "ํ•œ ์ปจํ…Œ์ด๋„ˆ์— ํ•œ ํ”„๋กœ๊ทธ๋žจ"์ด๋ผ๋Š” ๊ฒƒ์ด ์žˆ๋‹ค. ์ด๋ ‡๊ฒŒ ๊ด€๋ฆฌํ•˜๋Š” ๊ฒƒ์ด ๋ณด์•ˆ ๋ฐ ์œ ์ง€ ๊ด€๋ฆฌ ์ธก๋ฉด์—์„œ ์œ ๋ฆฌํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๋งŽ์ด ์“ฐ์ด๋Š” ์ •์ฑ…์ด๋‹ค. + +## ์šด์˜์ฒด์ œ ์ฃผ๋ณ€๋ถ€๋ฅผ ํ•œ ์ข…๋ฅ˜๋กœ ํ†ต์ผํ•ด์•ผ ํ• ๊นŒ? + +์ปจํ…Œ์ด๋„ˆ ๊ฐ„์— ์™„์ „ํžˆ ๊ฒฉ๋ฆฌ๋˜์–ด ์žˆ์œผ๋ฏ€๋กœ ์„œ๋กœ ๋‹ค๋ฅธ ๋ฐฐํฌํŒ์„ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ, ์ด๋Š” ๋„์ปค์˜ ๋งค๋ ฅ ์ค‘ ํ•˜๋‚˜์ด๋‹ค. + +ํ•˜์ง€๋งŒ ์ปจํ…Œ์ด๋„ˆ์— ๋กœ๊ทธ์ธ์„ ํ•  ํ•„์š”๊ฐ€ ์žˆ๊ฑฐ๋‚˜ ํŠน์ • DBMS๋ฅผ ์‚ฌ์šฉํ•  ๋•Œ๋Š” ์šด์˜์ฒด์ œ ์ข…๋ฅ˜์— ๋”ฐ๋ผ ๋ฌธ์ œ๋ฅผ ์ผ์œผํ‚ฌ ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ ๋ฐฐํฌํŒ์„ ์ •ํ™•ํžˆ ์„ ํƒํ•ด์•ผ ํ•œ๋‹ค. + +--- + +# SECTION 3. ๋„์ปค ์ปจํ…Œ์ด๋„ˆ์˜ ์ƒ์• ์ฃผ๊ธฐ์™€ ๋ฐ์ดํ„ฐ ์ €์žฅ +--- +> ์ปจํ…Œ์ด๋„ˆ๋Š” '์˜ค๋žซ๋™์•ˆ ์•„๊ปด ์“ฐ๋Š” ๋ฌผ๊ฑด'์ด ์•„๋‹ˆ๋ผ '๊ทธ๋•Œ ๊ทธ๋•Œ ์“ฐ๊ณ  ๋ฒ„๋ฆฌ๋Š” ์ผํšŒ์šฉํ’ˆ'์— ๊ฐ€๊น๋‹ค. + +## 1. ์ปจํ…Œ์ด๋„ˆ์˜ ์ƒ์•  ์ฃผ๊ธฐ + +์ปจํ…Œ์ด๋„ˆ๋Š” ์‰ฝ๊ฒŒ ๋งŒ๋“ค ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์—, ํ•˜๋‚˜๋ฅผ ์—…๋ฐ์ดํŠธํ•˜๋ฉด์„œ ๊ณ„์† ์‚ฌ์šฉํ•˜๊ธฐ๋ณด๋‹ค๋Š” ์—…๋ฐ์ดํŠธ๋œ ์†Œํ”„ํŠธ์›จ์–ด๊ฐ€ ๋“ค์–ด์žˆ๋Š” ์ƒˆ๋กœ์šด ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ์ด ์ข‹๋‹ค. + +๊ธฐ๋ณธ์ ์œผ๋กœ ์ปจํ…Œ์ด๋„ˆ๋Š” ์—ฌ๋Ÿฌ ๊ฐœ๋ฅผ ๋™์‹œ์— ๊ฐ€๋™ํ•˜๋Š” ์ƒํ™ฉ์„ ์ „์ œ๋กœ ํ•˜๋Š”๋ฐ ์ด๋ฅผ ๋ชจ๋‘ ์—…๋ฐ์ดํŠธํ•˜๋ ค๋ฉด ๋งŽ์€ ์ˆ˜๊ณ ๊ฐ€ ๋“ ๋‹ค. ์ด๋Š” ์ปจํ…Œ์ด๋„ˆ์˜ ์žฅ์ ์ด ํ‡ด์ƒ‰๋˜๋Š” ๊ฒƒ์ด๋‹ค. + +๊ทธ๋Ÿฌ๋ฏ€๋กœ ์˜ค๋ž˜๋œ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋ฒ„๋ฆฌ๊ณ  ์ƒˆ๋กœ์šด ์ด๋ฏธ์ง€๋กœ๋ถ€ํ„ฐ ์ƒˆ๋กœ์šด ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“ค์–ด ๊ฐˆ์•„ํƒ€๋Š” ๋ฐฉ์‹์„ ์‚ฌ์šฉํ•œ๋‹ค. +์ด์ฒ˜๋Ÿผ ์ปจํ…Œ์ด๋„ˆ๋ฅผ '๋งŒ๋“ค๊ณ ', '์‹คํ–‰ํ•˜๊ณ ', '์ข…๋ฃŒํ•˜๊ณ ', 'ํ๊ธฐ'ํ•œ ๋‹ค์Œ ๋‹ค์‹œ ์ปจํ…Œ์ด๋„ˆ๋ฅผ '๋งŒ๋“œ๋Š”' ์ผ๋ จ์˜ ๊ณผ์ •์„ **์ปจํ…Œ์ด๋„ˆ์˜ ์ƒ์•  ์ฃผ๊ธฐ**๋ผ๊ณ  ํ•œ๋‹ค. + +### 1-1. ๋ฐ์ดํ„ฐ ์ €์žฅ + +์ปจํ…Œ์ด๋„ˆ๋ฅผ ํ๊ธฐํ•  ๋•Œ ๋ฐ์ดํ„ฐ๋ฅผ ๋ณด์กดํ•˜๊ธฐ ์œ„ํ•ด **๋„์ปค๊ฐ€ ์„ค์น˜๋œ ๋ฌผ๋ฆฌ์  ์„œ๋ฒ„(ํ˜ธ์ŠคํŠธ)์˜ ๋””์Šคํฌ๋ฅผ ๋งˆ์šดํŠธํ•ด** ์ด ๋””์Šคํฌ์— ๋ฐ์ดํ„ฐ๋ฅผ ์ €์žฅํ•œ๋‹ค. + +์—ฌ๊ธฐ์„œ ๋งˆ์šดํŠธ๋Š” "๋””์Šคํฌ๋ฅผ ์—ฐ๊ฒฐํ•ด ๋ฐ์ดํ„ฐ๋ฅผ ๊ธฐ๋กํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•œ ์ƒํƒœ"๋ฅผ ์˜๋ฏธํ•œ๋‹ค. ์ด๋Ÿฐ ๋ฐฉ๋ฒ•์œผ๋กœ ์ปจํ…Œ์ด๋„ˆ ์™ธ๋ถ€์— ๋ฐ์ดํ„ฐ๋ฅผ ์•ˆ์ „ํ•˜๊ฒŒ ์ €์žฅํ•œ๋‹ค. + +์šด์˜์ฒด์ œ๋‚˜ ์†Œํ”„ํŠธ์›จ์–ด ๋ถ€๋ถ„์€ ์ปจํ…Œ์ด๋„ˆ ํ˜•ํƒœ๋กœ ๋งŒ๋“ค์–ด ์“ฐ๊ณ  ๋ฒ„๋ฆฌ๋Š” ๊ฒƒ์„ ๋ฐ˜๋ณตํ•˜๊ณ , ๋ฐ์ดํ„ฐ๋Š” ๋‹ค๋ฅธ ๊ณณ์— ์ €์žฅํ•ด ๊ณ„์† ์‚ฌ์šฉํ•œ๋‹ค. ์ด๋Š” ์„ค์ • ํŒŒ์ผ๋„ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ ํŒŒ์ผ์„ ์ˆ˜์ •ํ–ˆ๋‹ค๋ฉด ์‚ญ์ œ๋˜์ง€ ์•Š์„ ๊ณณ์— ์ €์žฅํ•œ๋‹ค. 
+ +๊ทธ๋Ÿฌ๋‚˜ ํ”„๋กœ๊ทธ๋žจ์„ ๊ฐœ๋ฐœ ํ•  ๋•Œ๋Š” ๋‹ค๋ฅธ ์ €์žฅ์†Œ์— ์ €์žฅํ•˜์ง€ ์•Š๋Š” ๊ฒฝ์šฐ๋„ ์žˆ์œผ๋ฏ€๋กœ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ํ๊ธฐํ•˜๊ธฐ ์ „ ์ค‘์š”ํ•œ ๋ฐ์ดํ„ฐ๊ฐ€ ์ปจํ…Œ์ด๋„ˆ์— ํฌํ•จ๋˜์–ด ์žˆ๋Š”์ง€ ํ™•์ธํ•ด์•ผ ํ•œ๋‹ค. + +--- + +# SECTION 4. ๋„์ปค์˜ ์žฅ์ ๊ณผ ๋‹จ์  +--- + +## 1. ๋„์ปค์˜ ๊ตฌ์กฐ์™€ ์„ฑ์งˆ ๋ฐ ๊ทธ ์žฅ๋‹จ์  + +### 1-1. ํ™˜๊ฒฝ์„ ๊ฒฉ๋ฆฌํ•  ์ˆ˜ ์žˆ๋‹ค. + +#### ๋…๋ฆฝ๋œ ํ™˜๊ฒฝ + +๋…๋ฆฝ๋œ ํ™˜๊ฒฝ ๋•๋ถ„์— ์—ฌ๋Ÿฌ ๊ฐœ์˜ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋„์šธ ์ˆ˜ ์žˆ์œผ๋ฉฐ, ๋˜‘๊ฐ™์€ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜๋„ ์—ฌ๋Ÿฌ ๊ฐœ ๋„์šธ ์ˆ˜ ์žˆ๋‹ค. + +#### ์ด๋ฏธ์ง€๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +์ด๋ฏธ์ง€๋ฅผ ๋งŒ๋“ค๊ณ  ๋ฐฐํฌํ•  ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ, ๋ชจ๋“  ์ด๋ฏธ์ง€๋ฅผ ์ฒ˜์Œ๋ถ€ํ„ฐ ๋งŒ๋“ค์ง€ ์•Š์•„๋„ ์ด๋ฏธ์ง€๋ฅผ ๋‚ด๋ ค๋ฐ›์•„ ํŽธํ•˜๊ฒŒ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. + +๋˜ํ•œ ๊ตฌ์ถ• ์ž‘์—…์ด ๊ฐ„๋‹จํ•ด์ ธ์„œ ๊ต์ฒด๊ฐ€ ์‰ฝ๊ณ , ์—…๋ฐ์ดํŠธ๊ฐ€ ์‰ฝ๋‹ค. ์ด๋Š” ์ด๋™์„ฑ์ด ์ข‹๋‹ค๋Š” ํŠน์„ฑ์œผ๋กœ๋„ ์ด์–ด์ง„๋‹ค. ๋˜‘๊ฐ™์€ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์‰ฝ๊ฒŒ ๋งŒ๋“ค ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ, ํ™˜๊ฒฝ ์ด๋™์ด๋‚˜ ๊ฐœ๋ฐœํ™˜๊ฒฝ์„ ๊ตฌ์ถ•ํ•˜๊ธฐ๋„ ์‰ฝ๋‹ค. + +#### ์ปจํ…Œ์ด๋„ˆ์— ์ปค๋„์„ ํฌํ•จํ•  ํ•„์š”๊ฐ€ ์—†๋‹ค + +์ปจํ…Œ์ด๋„ˆ์—๋Š” ์ปค๋„์ด ์—†์–ด๋„ ๋˜์–ด ๊ฐ€๋ณ๋‹ค. + +### 1-2. ๋„์ปค์˜ ์žฅ๋‹จ์  + +#### ์žฅ์  + +- ํ•œ ๋Œ€์˜ ๋ฌผ๋ฆฌ ์„œ๋ฒ„์— ์—ฌ๋Ÿฌ ๋Œ€์˜ ์„œ๋ฒ„๋ฅผ ๋„์šธ ์ˆ˜ ์žˆ๋‹ค. +- ์„œ๋ฒ„ ๊ด€๋ฆฌ๊ฐ€ ์šฉ์ดํ•˜๋‹ค. +- ์„œ๋ฒ„ ๊ณ ์ˆ˜๊ฐ€ ์•„๋‹ˆ์–ด๋„ ๋‹ค๋ฃจ๊ธฐ ์‰ฝ๋‹ค. + +#### ๋‹จ์  + +- ๋ฆฌ๋ˆ…์Šค์šฉ ์†Œํ”„ํŠธ์›จ์–ด๋งŒ ์ง€์›ํ•œ๋‹ค. +- ํ˜ธ์ŠคํŠธ ์„œ๋ฒ„์— ๋ฌธ์ œ๊ฐ€ ์ƒ๊ธฐ๋ฉด ๋ชจ๋“  ์ปจํ…Œ์ด๋„ˆ์— ์˜ํ–ฅ์ด ๋ฏธ์นœ๋‹ค. + +### 1-3. ๋„์ปค์˜ ์ฃผ ์šฉ๋„ + +- ํŒ€์› ๋ชจ๋‘์—๊ฒŒ ๋™์ผํ•œ ๊ฐœ๋ฐœํ™˜๊ฒฝ ์ œ๊ณต + - ๋™์ผํ•œ ํ™˜๊ฒฝ์„ ์—ฌ๋Ÿฌ ๊ฐœ ๋งŒ๋“ค๊ธฐ +- ์ƒˆ๋กœ์šด ๋ฒ„์ „์˜ ํ…Œ์ŠคํŠธ + - ๊ฒฉ๋ฆฌ๋œ ํ™˜๊ฒฝ์„ ์ด์šฉ +- ๋™์ผํ•œ ์„œ๋ฒ„๊ฐ€ ์—ฌ๋Ÿฌ ๋Œ€ ํ•„์š”ํ•œ ๊ฒฝ์šฐ + - ์ปจํ…Œ์ด๋„ˆ ๋ฐ–๊ณผ ๋…๋ฆฝ๋œ ์„ฑ์งˆ์„ ์ด์šฉ \ No newline at end of file diff --git a/_posts/2024-11-17-dockerCH4.markdown b/_posts/2024-11-17-dockerCH4.markdown new file mode 100644 index 00000000000..775f12887a3 --- /dev/null +++ b/_posts/2024-11-17-dockerCH4.markdown @@ -0,0 +1,612 @@ +--- +title: "CH4. ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์‹คํ–‰ํ•ด ๋ณด์ž [๊ทธ๋ฆผ์œผ๋กœ ๋ฐฐ์šฐ๋Š” ๋„์ปค & ์ฟ ๋ฒ„๋„คํ‹ฐ์Šค]" +author: kwon +date: 2024-11-17T14:00:00 +0900 +categories: [๋„์ปค] +tags: [docker] +math: true +mermaid: false +--- + +# SECTION 1. ๋„์ปค ์—”์ง„ ์‹œ์ž‘ํ•˜๊ธฐ/์ข…๋ฃŒํ•˜๊ธฐ +--- +## ๋ฆฌ๋ˆ…์Šค์—์„œ ์‹œ์ž‘ํ•˜๊ธฐ +```bash +# ๋„์ปค ์—”์ง„ ์‹œ์ž‘ +sudo systemtl start docker +# ๋„์ปค ์—”์ง„ ์ข…๋ฃŒ +sudo systemtl stop docker +# ์ž๋™ ์‹คํ–‰ ์„ค์ • +sudo systemtl enable docker +``` + +--- + +# SECTION 2. ์ปจํ…Œ์ด๋„ˆ์˜ ๊ธฐ๋ณธ์ ์ธ ์‚ฌ์šฉ๋ฐฉ๋ฒ• +> ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋‹ค๋ฃจ๋Š” ๋ชจ๋“  ๋ช…๋ น์€ `docker` ๋ช…๋ น์–ด๋กœ ์‹œ์ž‘ํ•œ๋‹ค +```bash +docker ~ +``` + +## ๋ช…๋ น์–ด์™€ ๋Œ€์ƒ + +`docker` ๋ช…๋ น์–ด ๋’ค์— '๋ฌด์—‡์„', '์–ด๋–ป๊ฒŒ', '๋Œ€์ƒ' ์ˆœ์œผ๋กœ ์ง€์ •ํ•˜์—ฌ ๋ช…๋ น์–ด๋ฅผ ์ž‘์„ฑํ•œ๋‹ค. + +![](/posting_imgs/docker-command1.png) + +'๋ฌด์—‡์„', ์–ด๋–ป๊ฒŒ ๋ถ€๋ถ„์„ ์ปค๋งจ๋“œ๋ผ๊ณ  ํ•˜๊ณ , ์ƒ์œ„ ์ปค๋งจ๋“œ๊ฐ€ '๋ฌด์—‡์„', ํ•˜์œ„ ์ปค๋งจ๋“œ๊ฐ€ '์–ด๋–ป๊ฒŒ'์— ํ•ด๋‹นํ•˜๋Š” ๋‚ด์šฉ์„ ์ง€์ •ํ•œ๋‹ค. + +์ƒ์œ„ ์ปค๋งจ๋“œ์— ๋“ค์–ด๊ฐ€๋Š” **๋Œ€์ƒ์˜ ์ข…๋ฅ˜**๋Š” 12์ข…๋ฅ˜์ด๋‹ค. ๊ฐœ์ธ์ ์œผ๋กœ๋Š” ํ”„๋กœ๊ทธ๋ž˜๋ฐ ์–ธ์–ด์—์„œ์˜ type๊ณผ ๊ฐ™์€ ๊ฒƒ์ด๋ผ๊ณ  ์ดํ•ดํ–ˆ๋‹ค. + +```bash +docker image pull penguin +docker container start penguin +... 
+``` + +## ์˜ต์…˜๊ณผ ์ธ์ž + +ํ•„์š”์— ๋”ฐ๋ผ '์˜ต์…˜'์ด๋‚˜ '์ธ์ž'๋ฅผ ๋ถ™์—ฌ ์‚ฌ์šฉํ•˜๋Š” ๊ฒฝ์šฐ๋„ ์žˆ๋‹ค + +![](/posting_imgs/docker-command2.png) + +์œ„ ๋ช…๋ น์–ด๋Š” penguin container๋ฅผ ๋ฐฑ๊ทธ๋ผ์šด๋“œ๋กœ(`-d`), mode 1๋กœ(`--mode=1`) ์‹คํ–‰ํ•˜๋ผ๋Š” ์˜๋ฏธ์ด๋‹ค. + +๋ชจ๋“  ๋ช…๋ น์–ด์— ์˜ต์…˜์ด๋‚˜ ์ธ์ž๊ฐ€ ๋ถ™๋Š” ๊ฒƒ์€ ์•„๋‹ˆ๋ฉฐ, ์—ฌ๋Ÿฌ๊ฐœ ๋ถ™์„ ์ˆ˜ ์žˆ๋Š” ๋ช…๋ น์–ด๋ผ๋„ ์ž์ฃผ ์“ฐ์ด๋Š” ๊ฒƒ์€ ํ•œ์ •๋˜์–ด ์žˆ์œผ๋ฏ€๋กœ ๊ธฐ์–ตํ•ด๋†“๋Š” ๊ฒƒ์ด ์ข‹๋‹ค. + +## ๊ธฐ๋ณธ์ ์ธ ๋ช…๋ น์–ด + +### ์ปจํ…Œ์ด๋„ˆ ์กฐ์ž‘ ๊ด€๋ จ ์ปค๋งจ๋“œ(`container`) + +| ํ•˜์œ„ ์ปค๋งจ๋“œ | ๋‚ด์šฉ | ์ƒ๋žต ๊ฐ€๋Šฅ ์—ฌ๋ถ€ | ์ฃผ์š” ์˜ต์…˜ | +| :---------: | :------------------------------------------------------------------------------------------------------------------------ | :------------: | :------------------------------------: | +| `start` | ์ปจํ…Œ์ด๋„ˆ ์‹คํ–‰ | O | `-i` | +| `stop` | ์ปจํ…Œ์ด๋„ˆ ์ •์ง€ | O | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `create` | ๋„์ปค ์ด๋ฏธ์ง€๋กœ๋ถ€ํ„ฐ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ƒ์„ฑ | O | `--name` `-e` `-p` `-v` | +| `run` | ๋„์ปค ์ด๋ฏธ์ง€๋ฅผ ๋‚ด๋ ค๋ฐ›๊ณ  ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ƒ์„ฑํ•ด ์‹คํ–‰(`docker image pull` + `docker container create` + `docker container start`) | O | `--name` `-e` `-p` `-v` `-d` `-i` `-t` | +| `rm` | ์ •์ง€ ์ƒํƒœ์˜ ์ปจํ…Œ์ด๋„ˆ ์‚ญ์ œ | O | `-f` `-v` | +| `exec` | ์‹คํ–‰ ์ค‘์ธ ์ปจํ…Œ์ด๋„ˆ ์†์—์„œ ํ”„๋กœ๊ทธ๋žจ์„ ์‹คํ–‰ | O | `-i` `-t` | +| `ls` | ์ปจํ…Œ์ด๋„ˆ ๋ชฉ๋ก ์ถœ๋ ฅ | `docker ps` | `-a` | +| `cp` | ๋„์ปค ์ปจํ…Œ์ด๋„ˆ์™€ ๋„์ปค ํ˜ธ์ŠคํŠธ ๊ฐ„์— ํŒŒ์ผ ๋ณต์‚ฌ | O | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `commit` | ๋„์ปค ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ด๋ฏธ์ง€๋กœ ๋ณ€ํ™˜ | O | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | + + +### ์ด๋ฏธ์ง€ ์กฐ์ž‘ ๊ด€๋ จ ์ปค๋งจ๋“œ(`image`) + +| ํ•˜์œ„ ์ปค๋งจ๋“œ | ๋‚ด์šฉ | ์ƒ๋žต ๊ฐ€๋Šฅ ์—ฌ๋ถ€ | ์ฃผ์š” ์˜ต์…˜ | +| :---------: | :---------------------------------------------- | :------------: | :----------------: | +| `pull` | ๋„์ปค ํ—ˆ๋ธŒ ๋“ฑ์˜ repository์—์„œ ์ด๋ฏธ์ง€๋ฅผ ๋‚ด๋ ค๋ฐ›์Œ | O | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `rm` | ๋„์ปค ์ด๋ฏธ์ง€ ์‚ญ์ œ | `docker rmi` | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `ls` | ๋‚ด๋ ค๋ฐ›์€ ์ด๋ฏธ์ง€ ๋ชฉ๋ก | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `build` | ๋„์ปค ์ด๋ฏธ์ง€๋ฅผ ์ƒ์„ฑ | O | `-t` | + +### ๋ณผ๋ฅจ ์กฐ์ž‘ ๊ด€๋ จ ์ปค๋งจ๋“œ(`volume`) + +| ํ•˜์œ„ ์ปค๋งจ๋“œ | ๋‚ด์šฉ | ์ƒ๋žต ๊ฐ€๋Šฅ ์—ฌ๋ถ€ | ์ฃผ์š” ์˜ต์…˜ | +| :---------: | :----------------------------- | :------------: | :----------------: | +| `create` | ๋ณผ๋ฅจ ์ƒ์„ฑ | X | `--name` | +| `inspect` | ๋ณผ๋ฅจ์˜ ์ƒ์„ธ ์ •๋ณด ์ถœ๋ ฅ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `ls` | ๋ณผ๋ฅจ ๋ชฉ๋ก | X | `-a` | +| `prune` | ๋งˆ์šดํŠธ๋˜์ง€ ์•Š์€ ๋ณผ๋ฅจ ๋ชจ๋‘ ์‚ญ์ œ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `rm` | ์ง€์ •ํ•œ ๋ณผ๋ฅจ ์‚ญ์ œ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | + +### ๋„คํŠธ์›Œํฌ ์กฐ์ž‘ ๊ด€๋ จ ์ปค๋งจ๋“œ(`network`) + +๋„์ปค ๋„คํŠธ์›Œํฌ๋ž€ ๋„์ปค ์š”์†Œ ๊ฐ„์˜ ํ†ค์‹ ์— ์‚ฌ์šฉํ•˜๋Š” ๊ฐ€์žฅ ๋„คํŠธ์›Œํฌ์ด๋‹ค. 
+ +| ํ•˜์œ„ ์ปค๋งจ๋“œ | ๋‚ด์šฉ | ์ƒ๋žต ๊ฐ€๋Šฅ ์—ฌ๋ถ€ | ์ฃผ์š” ์˜ต์…˜ | +| :----------: | :------------------------------------- | :------------: | :----------------: | +| `connect` | ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋„์ปค ๋„คํŠธ์›Œํฌ์— ์—ฐ๊ฒฐ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `disconnect` | ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋„์ปค ๋„คํŠธ์›Œํฌ ์—ฐ๊ฒฐ ํ•ด์ œ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `create` | ๋„์ปค ๋„คํŠธ์›Œํฌ ์ƒ์„ฑ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `inspect` | ๋„์ปค ๋„คํŠธ์›Œํฌ ์ƒ์„ธ ์ •๋ณด ์ถœ๋ ฅ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `ls` | ๋„์ปค ๋„คํŠธ์›Œํฌ ๋ชฉ๋ก | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `prune` | ์ปจํ…Œ์ด๋„ˆ๊ฐ€ ์ ‘์†ํ•˜์ง€ ์•Š์€ ๋„คํŠธ์›Œํฌ ์‚ญ์ œ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `rm` | ์ง€์ •ํ•œ ๋„คํŠธ์›Œํฌ ์‚ญ์ œ | X | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | + +### ๋‹จ๋…์œผ๋กœ ์“ฐ์ด๋Š” ์ปค๋งจ๋“œ + +๋„์ปค ํ—ˆ๋ธŒ์˜ ๊ฒ€์ƒ‰์ด๋‚˜ ๋กœ๊ทธ์ธ์— ์‚ฌ์šฉ๋˜๋Š” ์ปค๋งจ๋“œ + +| ์ปค๋งจ๋“œ | ๋‚ด์šฉ | ์ฃผ์š” ์˜ต์…˜ | +| :---------: | :----------------------------------- | :----------------: | +| `login` | ๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ์— ๋กœ๊ทธ์ธ | -u -p | +| `logout` | ๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ์— ๋กœ๊ทธ์•„์›ƒ | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `search` | ๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ๋ฅผ ๊ฒ€์ƒ‰ | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | +| `version` | ๋„์ปค ์—”์ง„ ๋ฐ ๋ช…๋ นํ–‰ ๋„๊ตฌ์˜ ๋ฒ„์ „ ์ถœ๋ ฅ | ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ | + + +# SECTION 3. ์ปจํ…Œ์ด๋„ˆ์˜ ์ƒ์„ฑ๊ณผ ์‚ญ์ œ, ์‹คํ–‰, ์ •์ง€ + +๋„์ปค ์ปค๋งจ๋“œ์—๋Š” ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ƒ์„ฑํ•˜๋Š” `docke (container) create`, ์‹คํ–‰ํ•˜๋Š” `docker (container) start`, ์ด๋ฏธ์ง€๋ฅผ ๋‚ด๋ ค๋ฐ›๋Š” `docker (container) pull` ์ด ๋”ฐ๋กœ ์กด์žฌํ•˜์ง€๋งŒ ์ด๋ฅผ ํ•œ ๋ฒˆ์— ์ˆ˜ํ–‰ํ•  ์ˆ˜ ์žˆ๋Š” `docker (container) run`์„ ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ์ด ์ผ๋ฐ˜์ ์ด๋‹ค. + +![](/posting_imgs/docker-lifecycle.png) + +2์žฅ์—์„œ ๋‚˜์™”๋˜ ์ƒ์• ์ฃผ๊ธฐ์— ๋”ฐ๋ผ ์ปจํ…Œ์ด๋„ˆ๋Š” ์“ฐ๊ณ  ๋ฒ„๋ฆฌ๋Š” ๋ฐฉ์‹์œผ๋กœ ์‚ฌ์šฉํ•œ๋‹ค. ๋™์ž‘ ์ค‘์ธ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๊ทธ๋Œ€๋กœ ์‚ญ์ œํ•  ์ˆ˜ ์—†์œผ๋ฏ€๋กœ ์ •์ง€ํ•˜๋Š” ๋ฐฉ๋ฒ•๊ณผ ์‚ญ์ œํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ํ•จ๊ป˜ ์•Œ์•„์•ผ ํ•œ๋‹ค. + +## `docker (container) run` + +์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ƒ์„ฑํ•ด ์‹คํ–‰ํ•œ๋‹ค. ํ•ด๋‹น ์ด๋ฏธ์ง€๋ฅด ๋‚ด๋ ค๋ฐ›์œผ ์ƒํƒœ๊ฐ€ ์•„๋‹ˆ๋ผ๋ฉด ๋จผ์ € ์ด๋ฏธ์ง€๋ฅผ ๋‚ด๋ ค๋ฐ›๋Š”๋‹ค. '๋Œ€์ƒ'์œผ๋กœ๋Š” ์ด๋ฏธ์ง€์˜ ์ด๋ฆ„์„ ์ง€์ •ํ•œ๋‹ค. 
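Putting that together, a typical invocation looks like the sketch below (`httpd`, the container name, and the port mapping are arbitrary examples; the individual options are listed in the table that follows):

```sh
# pull httpd if it is not present, create a container named web01,
# and start it in the background with host port 8080 published to container port 80
docker run --name web01 -d -p 8080:80 httpd
```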
+ +| ์˜ต์…˜ ํ˜•์‹ | ๋‚ด์šฉ | Full name | +| :-------------------------------: | :---------------------------------------------------------------------------------------------- | :-------------: | +| `--name` | ์ปจํ…Œ์ด๋„ˆ ์ด๋ฆ„ ์ง€์ • | - | +| `-p {host_port}:{container_port}` | ํฌํŠธ ๋ฒˆํ˜ธ ์ง€์ • | `--publish` | +| `-v {host_disk}:{container_dir}` | ๋ณผ๋ฅจ ๋งˆ์šดํŠธ | `--volume` | +| `--net={natwork_name}` | ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋„คํŠธ์›Œํฌ์— ์—ฐ๊ฒฐ | `-env` | +| `-e {env-var_name}={vlaue}` | ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋ฅผ ์„ค์ • | - | +| `-d` | ๋ฐฑ๊ทธ๋ผ์šด๋“œ๋กœ ์‹คํ–‰ | `--detach` | +| `-i` | ์ปจํ…Œ์ด๋„ˆ์—์„œ ํ„ฐ๋ฏธ๋„(ํ‚ค๋ณด๋“œ) ์—ฐ๊ฒฐ (ํ‘œ์ค€ ์ž…๋ ฅ์œผ๋กœ ์ปจํ…Œ์ด๋„ˆ ์ƒ์„ฑ) | `--interactive` | +| `-t` | ํŠน์ˆ˜ ํ‚ค๋ฅผ ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•˜๋„๋ก ํ•จ (ํ„ฐ๋ฏธ๋„ ๋“œ๋ผ์ด๋ฒ„๋ฅผ ์ถ”๊ฐ€ํ•˜์—ฌ ํ„ฐ๋ฏธ๋„์„ ์ด์šฉํ•˜์—ฌ ์—ฐ๊ฒฐํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•จ) | `--tty` | +| `-help` | ์‚ฌ์šฉ ๋ฐฉ๋ฒ• ์•ˆ๋‚ด ๋ฉ”์„ธ์ง€ | - | + +## `docker (container) stop` + +```bash +docker stop {container_name} +``` + +## `docker (container) rm` + +```bash +docker rm {container_name} +``` + +### ํ•œ ๋ฒˆ๋งŒ ์‹คํ–‰๋˜๋Š” ์ปจํ…Œ์ด๋„ˆ์™€ ๋ฐ๋ชฌ ํ˜•ํƒœ๋กœ ๋™์ž‘ํ•˜๋Š” ์ปจํ…Œ์ด๋„ˆ + +`-d`๋ฅผ ๋ถ™์ด์ง€ ์•Š๊ณ  ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์‹ฑํ–‰ํ•˜๋ฉด ์‹คํ–‰๋œ ์ปจํ…Œ์ด๋„ˆ๊ฐ€ ํ”„๋กœ๊ทธ๋žจ์˜ ์‹คํ–‰์„ ๋งˆ์น  ๋•Œ๊นŒ์ง€ ํ„ฐ๋ฏธ๋„์˜ ์ œ์–ด๋ฅผ ์ฐจ์ง€ํ•˜๋ฏ€๋กœ ๋‹ค์Œ ๋ช…๋ น์„ ์ž…๋ ฅํ•  ์ˆ˜ ์—†๋‹ค. +`-it`๋ฅผ ๋ถ™์ด์ง€ ์•Š์œผ๋ฉด ์ปจํ…Œ์ด๋„ˆ ์•ˆ์˜ ํŒŒ์ผ ์‹œ์Šคํ…œ์ด ์ ‘๊ทผํ•  ์ˆ˜ ์—†๋‹ค. + +ํ•œ ๋ฒˆ๋งŒ ์‹คํ–‰๋˜๋Š” ์ปจํ…Œ์ด๋„ˆ๋Š” ์‹คํ–‰ํ•˜์ž๋งˆ์ž ์ข…๋ฃŒ๋˜๋ฏ€๋กœ ์ปจํ…Œ์ด๋„ˆ๊ฐ€ ํ„ฐ๋ฏธ๋„์˜ ์ œ์–ด๋ฅผ ์ฐจ์ง€ํ•ด๋„ ์ผ์‹œ์ ์ด๋ฏ€๋กœ ๋ฌธ์ œ๊ฐ€ ๋˜์ง€ ์•Š๋Š”๋‹ค. +ํ•˜์ง€๋งŒ ๋ฐ๋ชฌ์ฒ˜๋Ÿผ ์ง€์†์ ์œผ๋กœ ์‹คํ–‰๋˜๋Š” ๊ฒฝ์šฐ ์ €์ ˆ๋กœ ์ข…๋ฃŒ๋˜์ง€ ์•Š์œผ๋ฏ€๋กœ ํ•œ ๋ฒˆ ํ„ฐ๋ฏธ๋„ ์ œ์–ด๋ฅผ ๋„˜์ง€๋ฉด ์ด๋ฅผ ๋˜์ฐพ์•„์˜ค๊ธฐ ๋ฒˆ๊ฑฐ๋กญ๋‹ค. + +๋˜ํ•œ ๋ฐ”๋กœ ์ข…๋ฃŒ๋˜๋Š” ์ปจํ…Œ์ด๋„ˆ์˜ ๊ฒฝ์šฐ ์ปจํ…Œ์ด๋„ˆ ์† ํŒŒ์ผ ์‹œ์Šคํ…œ์— ์ ‘๊ทผํ•  ํ•„์š•์‚ฌ ์—†์œผ๋ฏ€๋กœ `-it`๊ฐ€ ํ•„์š”์—†๋Š” ์˜ต์…˜์ด ๋œ๋‹ค. + +## `docker ps (docker container ls)` + +```bash +# ์‹คํ–‰ ์ค‘์ธ ์ปจํ…Œ์ด๋„ˆ ํ™•์ธ +docker ps + +# ์กด์žฌํ•˜๋Š” ์ปจํ…Œ์ด๋„ˆ ํ™•์ธ (์ •์ง€๋œ ์ปจํ…Œ์ด๋„ˆ ํฌํ•จ) +docker ps -a +``` + +> `ps`๋Š” **pocess status**๋ฅผ ์˜๋ฏธํ•œ๋‹ค + +### ์ปจํ…Œ์ด๋„ˆ ๋ชฉ๋ก ์ •๋ณด + +๋ชฉ๋ก์„ ์ถœ๋ ฅํ•˜๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์ •๋ณด๋“ค์ด ์ถœ๋ ฅ๋œ๋‹ค. +``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +``` + +์ž์„ธํ•œ ๋‚ด์šฉ์€ ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. + +| ํ•ญ๋ชฉ | ๋‚ด์šฉ | +| :------------: | :---------------------------------------------------------------------------------------------------------- | +| `CONTAINER ID` | ์ปจํ…Œ์ด๋„ˆ ์‹๋ณ„์ž, ๋ฌด์ž‘์œ„ ๋ฌธ์ž์—ด์ด ํ• ๋‹น๋œ๋‹ค. (SHA256 ํ•ด์‹œ ์•Œ๊ณ ๋ฆฌ์ฆ˜) | +| `IMAGE` | ์ปจํ…Œ์ด๋„ˆ๋ฅผ ๋งŒ๋“ค ๋•Œ ์‚ฌ์šฉํ•œ ์ด๋ฏธ์ง€์˜ ์ด๋ฆ„ | +| `COMMAND` | ์ปจํ…Œ์ด๋„ˆ ์‹คํ–‰ ์‹œ์— ์‹คํ–‰๋˜๋„๋ก ์„ค์ •ํ•œ ํ”„๋กœ๊ทธ๋žจ ์ด๋ฆ„ | +| `CREATED` | ์ปจํ…Œ์ด๋„ˆ ์ƒ์„ฑ ํ›„ ๊ฒฝ๊ณผ๋œ ์‹œ๊ฐ„ | +| `STATUS` | ์ปจํ…Œ์ด๋„ˆ์˜ ํ˜„์žฌ ์ƒํƒœ. ์‹คํ–‰ ์ค‘์ด๋ฉด 'Up', ์ข…๋ฃŒ ์ƒํƒœ์ด๋ฉด 'Exited' | +| `PORTS` | ์ปจํ…Œ์ด๋„ˆ์— ํ• ๋‹น๋œ ํฌํŠธ ๋ฒˆํ˜ธ 'host_port -> container_port'ํ˜•์‹์œผ๋กœ ์ถœ๋ ฅ. 
ํฌํŠธ ๋ฒˆํ˜ธ๊ฐ€ ๋™์ผํ•  ๊ฒฝ์šฐ ํ•˜๋‚˜๋งŒ ์ถœ๋ ฅ | +| `NAMES` | ์ปจํ…Œ์ด๋„ˆ ์ด๋ฆ„ | + + +#### SHA256 + +docker์—์„œ์˜ ์ž…๋ ฅ๊ฐ’ + +- ์ด๋ฏธ์ง€: + - ์ด๋ฏธ์ง€ ๋ ˆ์ด์–ด ์ •๋ณด + - ํŒŒ์ผ ๋ฐ ๋””๋ ‰ํ„ฐ๋ฆฌ์˜ ํ•ด์‹œ ๊ฐ’ + - Dockerfile ๋ช…๋ น๊ณผ ๋นŒ๋“œ ์˜ต์…˜ +- ์ปจํ…Œ์ด๋„ˆ: + - ๋žœ๋ค ๋ฐ์ดํ„ฐ(๋žœ๋ค ์‹œ๋“œ, ์‹œ๊ฐ„, ํ˜ธ์ŠคํŠธ ์ •๋ณด ๋“ฑ) + - ์ƒ์„ฑ ์‹œ ์˜ต์…˜๊ณผ ํ™˜๊ฒฝ ๋ณ€์ˆ˜ + + +SHA-256์€ **SHA-2(Secure Hash Algorithm 2)** ๊ณ„์—ด์˜ ์•”ํ˜ธํ•™์  ํ•ด์‹œ ํ•จ์ˆ˜๋กœ, ์ž…๋ ฅ ๋ฐ์ดํ„ฐ๋ฅผ ๊ณ ์ •๋œ 256๋น„ํŠธ(32๋ฐ”์ดํŠธ) ํ•ด์‹œ ๊ฐ’์œผ๋กœ ๋ณ€ํ™˜ํ•ฉ๋‹ˆ๋‹ค. ๋ฐ์ดํ„ฐ ๋ฌด๊ฒฐ์„ฑ ๊ฒ€์ฆ ๋ฐ ๊ณ ์œ  ์‹๋ณ„์ž ์ƒ์„ฑ ๋“ฑ์— ๋„๋ฆฌ ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค. + +--- + +##### **ํŠน์ง•** +1. **์ž…๋ ฅ ํฌ๊ธฐ ์ œํ•œ ์—†์Œ**: + - ์–ด๋–ค ํฌ๊ธฐ์˜ ๋ฐ์ดํ„ฐ๋„ ์ž…๋ ฅ์œผ๋กœ ์ฒ˜๋ฆฌ ๊ฐ€๋Šฅ. + +2. **์ถœ๋ ฅ ํฌ๊ธฐ ๊ณ ์ •**: + - ํ•ญ์ƒ **256๋น„ํŠธ(64์ž๋ฆฌ 16์ง„์ˆ˜)** ๊ธธ์ด์˜ ํ•ด์‹œ ๊ฐ’์„ ์ƒ์„ฑ. + +3. **๊ฒฐ์ •๋ก ์ **: + - ๋™์ผํ•œ ์ž…๋ ฅ์€ ํ•ญ์ƒ ๋™์ผํ•œ ํ•ด์‹œ ๊ฐ’์„ ์ƒ์„ฑ. + +4. **์ถฉ๋Œ ์ €ํ•ญ์„ฑ**: + - ์„œ๋กœ ๋‹ค๋ฅธ ์ž…๋ ฅ์ด ๋™์ผํ•œ ํ•ด์‹œ ๊ฐ’์„ ๊ฐ€์งˆ ํ™•๋ฅ ์ด ๊ทนํžˆ ๋‚ฎ์Œ. + +5. **๋น„๊ฐ€์—ญ์„ฑ**: + - ํ•ด์‹œ ๊ฐ’์œผ๋กœ ์›๋ณธ ๋ฐ์ดํ„ฐ๋ฅผ ๋ณต์› ๋ถˆ๊ฐ€. + +6. **๋น ๋ฅธ ๊ณ„์‚ฐ**: + - ์ž…๋ ฅ ๋ฐ์ดํ„ฐ ํฌ๊ธฐ์™€ ๊ด€๊ณ„์—†์ด ๋น ๋ฅด๊ฒŒ ๊ณ„์‚ฐ ๊ฐ€๋Šฅ. + +--- + +##### **์ฃผ์š” ์‚ฌ์šฉ ์‚ฌ๋ก€** +1. **๋ฐ์ดํ„ฐ ๋ฌด๊ฒฐ์„ฑ ๊ฒ€์ฆ**: + - ํŒŒ์ผ์ด ์ „์†ก ์ค‘ ๋ณ€๊ฒฝ๋˜์ง€ ์•Š์•˜๋Š”์ง€ ํ™•์ธ. + - ์˜ˆ: ์†Œํ”„ํŠธ์›จ์–ด ๋ฐฐํฌ ์‹œ ํŒŒ์ผ์˜ SHA-256 ํ•ด์‹œ ์ œ๊ณต. + +2. **์•”ํ˜ธํ•™**: + - ๋””์ง€ํ„ธ ์„œ๋ช…, ์ธ์ฆ์„œ, ์ „์ž ์„œ๋ช… ๋“ฑ์— ํ™œ์šฉ. + +3. **๋น„๋ฐ€๋ฒˆํ˜ธ ์ €์žฅ**: + - ๋น„๋ฐ€๋ฒˆํ˜ธ๋ฅผ ํ•ด์‹œ ๊ฐ’์œผ๋กœ ๋ณ€ํ™˜ํ•˜์—ฌ ์•ˆ์ „ํ•˜๊ฒŒ ์ €์žฅ. + - +4. **๋ธ”๋ก์ฒด์ธ**: + - Bitcoin ๋“ฑ ๋ธ”๋ก์ฒด์ธ ๊ธฐ์ˆ ์—์„œ ํŠธ๋žœ์žญ์…˜ ๋ฐ ๋ธ”๋ก ํ•ด์‹œ์— ์‚ฌ์šฉ. + +5. **๊ณ ์œ  ์‹๋ณ„์ž ์ƒ์„ฑ**: + - ํŒŒ์ผ, ๋ฐ์ดํ„ฐ, ์ด๋ฏธ์ง€ ๋“ฑ์—์„œ ๊ณ ์œ  ID ์ƒ์„ฑ. + +--- + +##### **SHA-256์˜ ๋™์ž‘ ์›๋ฆฌ** +1. **ํŒจ๋”ฉ(Padding)**: + - ์ž…๋ ฅ ๋ฐ์ดํ„ฐ๋ฅผ 512๋น„ํŠธ์˜ ๋ฐฐ์ˆ˜๋กœ ์ฑ„์›€. ๋งˆ์ง€๋ง‰ 64๋น„ํŠธ๋Š” ์›๋ž˜ ๋ฐ์ดํ„ฐ ๊ธธ์ด ์ •๋ณด ํฌํ•จ. + +2. **์ดˆ๊ธฐ ํ•ด์‹œ ๊ฐ’ ์„ค์ •**: + - 256๋น„ํŠธ ํฌ๊ธฐ์˜ ์ดˆ๊ธฐ ํ•ด์‹œ ๊ฐ’(8๊ฐœ์˜ 32๋น„ํŠธ ๋‹จ์–ด)์„ ์ •์˜. + +3. **๋ฉ”์‹œ์ง€ ๋ถ„ํ• **: + - ์ž…๋ ฅ ๋ฐ์ดํ„ฐ๋ฅผ 512๋น„ํŠธ ๋ธ”๋ก์œผ๋กœ ๋ถ„ํ• . + +4. **์••์ถ• ํ•จ์ˆ˜(Compression Function)**: + - ๊ฐ ๋ธ”๋ก์— ๋Œ€ํ•ด ๋น„ํŠธ ์—ฐ์‚ฐ์„ ๋ฐ˜๋ณตํ•˜์—ฌ ํ•ด์‹œ ๊ฐ’ ๊ฐฑ์‹ . + +5. **์ตœ์ข… ํ•ด์‹œ ๊ฐ’ ์ถœ๋ ฅ**: + - ๋ชจ๋“  ๋ธ”๋ก ์ฒ˜๋ฆฌ ํ›„, 256๋น„ํŠธ ์ตœ์ข… ํ•ด์‹œ ๊ฐ’ ๋ฐ˜ํ™˜. 
+ +--- + +##### **Python์„ ์‚ฌ์šฉํ•œ SHA-256 ํ•ด์‹œ ์ƒ์„ฑ ์˜ˆ์ œ** +```python +import hashlib + +data = "hello" +sha256_hash = hashlib.sha256(data.encode()).hexdigest() +print("SHA-256 Hash:", sha256_hash) + +# SHA-256 Hash: 2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 +``` +--- + + +## [์‹ค์Šต] ์ปจํ…Œ์ด๋„ˆ ์ƒ์„ฑ, ์‹คํ–‰ ์ƒํƒœํ™•์ธ, ์ข…๋ฃŒ, ์‚ญ์ œ + +```bash +PS C:\Users\Kwon> docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + +# docker image pull + docker container create + docker container start +PS C:\Users\Kwon> docker run --name apa000ex1 -d httpd +latest: Pulling from library/httpd +334a67c7f78b: Download complete +3ed0d9182dde: Download complete +d675ed392a91: Download complete +0062038102c9: Download complete +4f4fb700ef54: Download complete +2d429b9e73a6: Download complete +Digest: sha256:6bdbdf5ac16ac3d6ef543a693fd5dfafae2428b4b0cdc52a480166603a069136 +Status: Downloaded newer image for httpd:latest +174a5c39573f4df13fad5620c4899bcde5828ff7b983e4456de688227e0ccecd + +# ์‹คํ–‰๋œ container ํ™•์ธ +PS C:\Users\Kwon> docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +174a5c39573f httpd "httpd-foreground" 12 seconds ago Up 11 seconds 80/tcp apa000ex1 + +# container ์ •์ง€ +PS C:\Users\Kwon> docker stop apa000ex1 +apa000ex1 + +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +# ์ •์ง€๋˜์—ˆ์œผ๋ฏ€๋กœ ps -a๋กœ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Œ (STATUS: Exited) +PS C:\Users\Kwon> docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + +PS C:\Users\Kwon> docker ps -a +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +174a5c39573f httpd "httpd-foreground" 3 minutes ago Exited (0) 12 seconds ago apa000ex1 + +# container ์‚ญ์ œ +PS C:\Users\Kwon> docker rm apa000ex1 +apa000ex1 + +PS C:\Users\Kwon> docker ps -a +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +``` + +# SECTION 4. ์ปจํ…Œ์ด๋„ˆ์˜ ํ†ต์‹  + +## ์•„ํŒŒ์น˜ + +> ์•„ํŒŒ์น˜๋Š” ์›น ์„œ๋ฒ„ ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•˜๋Š” ์†Œํ”„ํŠธ์›จ์–ด์ด๋‹ค. + +์›น ๋ธŒ๋ผ์šฐ์ €๋ฅผ ํ†ตํ•ด ๋“ค์–ด์˜จ ์š”์ฒญ์— ๋”ฐ๋ผ ์•„ํŒŒ์น˜ ์„œ๋ฒ„๊ฐ€ ์›น ์‚ฌ์ดํŠธ์˜ ๋‚ด์šฉ์„ ๋ฐ˜ํ™˜ํ•ด์ค€๋‹ค. + +## ์ปจํ…Œ์ด๋„ˆ์™€์˜ ํ†ต์‹  + +์›น ๋ธŒ๋ผ์šฐ์ €๋ฅผ ํ†ตํ•ด ์ปจํ…Œ์ด๋„ˆ์— ์ ‘๊ทผํ•˜๋ ค๋ฉด ์™ธ๋ถ€์™€ ์ ‘์†ํ•˜๊ธฐ ์œ„ํ•œ ์„ค์ •์ด ํ•„์š”ํ•˜๋‹ค. ์ด๋ฅผ ์œ„ํ•ด ํฌํŠธ๋ฅผ ์„ค์ •ํ•œ๋‹ค. + +> **port**๋Š” ํ†ต์‹  ๋‚ด์šฉ์ด ๋“œ๋‚˜๋“œ๋Š” ํ†ต๋กœ์ด๋‹ค. + +docker์—์„œ๋Š” `-p` ์˜ต์…˜์œผ๋กœ ์„ค์ •ํ•  ์ˆ˜ ์žˆ๋‹ค. + +์ปจํ…Œ์ด๋„ˆ๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด ์—ฌ๋Ÿฌ ๊ฐœ์˜ ์›น ์„œ๋ฒ„๋ฅผ ํ•จ๊ป˜ ์‹คํ–‰ํ•  ์ˆ˜๋„ ์žˆ๋‹ค. ์ด๋Ÿฌํ•œ ๊ฒฝ์šฐ ์ปจํ…Œ์ด๋„ˆ์™€ ์—ฐ๊ฒฐํ•˜๋Š” ํ˜ธ์ŠคํŠธ์˜ ํฌํŠธ ๋ฒˆํ˜ธ๋ฅผ ๊ฒน์น˜์ง€ ์•Š๊ฒŒ ์„ฑ์ •ํ•ด์•ผ ํ•œ๋‹ค. + +### Reverse Proxy + +#### Forard Proxy + +client๊ฐ€ ์ง์ ‘ server์— ์š”์ฒญํ•˜๋Š” ๊ฒƒ์ด ์•„๋‹ˆ๋ผ proxy server(์ค‘๊ณ„ ์„œ๋ฒ„)๋ฅผ ๊ฑฐ์ณ ์š”์ฒญํ•˜๋Š” ๊ฒƒ. +์ด๋ ‡๊ฒŒ ๋˜๋ฉด ์‚ฌ์‹ค์ƒ proxy server๊ฐ€ ์š”์ฒญํ•˜๋Š” ๊ฒƒ์œผ๋กœ ๋˜๊ธฐ ๋•Œ๋ฌธ์— client๊ฐ€ ๋ˆ„๊ตฐ์ง€ server๋Š” ์•Œ ์ˆ˜ ์—†๋‹ค. + +#### Reverse Proxy + +reverse proxy๋Š” server๊ฐ€ ๋ฐ˜ํ™˜ํ•ด์ฃผ๋Š” data๋ฅผ proxy server๊ฐ€ ๋Œ€์‹  ํ•ด์ฃผ๋Š” ๊ฒƒ์ด๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ client๋Š” server์˜ ์ •๋ณด๋ฅผ ์•Œ ์ˆ˜ ์—†๋‹ค. +๋ณด์•ˆ์ƒ ์ด์ ์ด ์žˆ์œผ๋ฉฐ, ์„œ๋ฒ„ ๋ถ€๋‹ด์„ ๋ถ„์‚ฐํ•  ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/proxy.png) + +reverse proxy๋ฅผ ํ™œ์šฉํ•˜๋ฉด ์—ฌ๋Ÿฌ ๋Œ€์˜ ์„œ๋ฒ„์— proxy๊ฐ€ ์š”์ฒญ์„ ์ „๋‹ฌ ํ•˜๊ฒŒ ํ•  ์ˆ˜ ์žˆ๋‹ค. ๊ทธ๋Ÿฌ๋ฏ€๋กœ ๊ฐ™์€ ํฌํŠธ๋กœ ์š”์ฒญ์ด ๋“ค์–ด์™”์„ ๋•Œ proxy๊ฐ€ ์„œ๋ฒ„๋ฅผ ๊ตฌ๋ถ„ํ•˜๋Š” ๋ฐฉ์‹์œผ๋กœ ์•Œ๋งž์€ ์ปจํ…Œ์ด๋„ˆ์— ์š”์ฒญ์„ ๋ณด๋‚ผ ์ˆ˜ ์žˆ๋‹ค. 
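The idea can be sketched with plain docker commands on a Linux/macOS shell (this is not part of the book's exercise; the container names, host names, and the nginx configuration below are all hypothetical): a single nginx container listens on the host's port 80 and forwards each request to one of two web containers on the same Docker network, choosing the target by the requested host name.

```sh
# two web servers that are reachable only from inside the docker network
docker network create proxy-net
docker run -d --name web1 --net proxy-net httpd
docker run -d --name web2 --net proxy-net httpd

# a reverse-proxy config: route by the Host header to web1 or web2
cat > default.conf <<'EOF'
server {
    listen 80;
    server_name site1.example.com;
    location / { proxy_pass http://web1:80; }
}
server {
    listen 80;
    server_name site2.example.com;
    location / { proxy_pass http://web2:80; }
}
EOF

# only the proxy publishes a host port; web1/web2 are resolved through Docker's internal DNS
docker run -d --name proxy --net proxy-net -p 80:80 \
  -v "$(pwd)/default.conf:/etc/nginx/conf.d/default.conf:ro" nginx
```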
+ +## [์‹ค์Šต] + +```bash +# host: 8080, container: 80์œผ๋กœ ์•„ํŒŒ์น˜ ์„œ๋ฒ„ container๋ฅผ ์‹คํ–‰ +PS C:\Users\Kwon> docker run --name apa000ex2 -d -p 8080:80 httpd +4ea55871346bea8332fdd1aee1c23620aff366895fbc573144b5cf4bd246d710 +PS C:\Users\Kwon> docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +4ea55871346b httpd "httpd-foreground" 20 seconds ago Up 19 seconds 0.0.0.0:8080->80/tcp apa000ex2 + +# ์ •์ง€ ๋ฐ ์‚ญ์ œ +PS C:\Users\Kwon> docker stop apa000ex2 +apa000ex2 +PS C:\Users\Kwon> docker rm apa000ex2 +apa000ex2 +PS C:\Users\Kwon> docker ps -a +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +PS C:\Users\Kwon> +``` + +![์‹ค์Šต ์‹คํ–‰ ํ™”๋ฉด](/posting_imgs/net_test.png) + +์œ„์™€ ๊ฐ™์€ ์‹คํ–‰ ํ™”๋ฉด์„ ์–ป์„ ์ˆ˜ ์žˆ๋‹ค. + +--- + +# SECTION 5. ์ปจํ…Œ์ด๋„ˆ ์ƒ์„ฑ์— ์ต์ˆ™ํ•ด์ง€๊ธฐ + +## ๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ๊ฐ€ ๋‹ด๊ธด ์ปจํ…Œ์ด๋„ˆ + +๋ฆฌ๋ˆ…์Šค ์šด์˜์ฒด์ œ ์ปจ๋ฐ์ด๋„ˆ๋Š” ์ปจํ…Œ์ด๋„ˆ ์† ํŒŒ์ผ ์‹œ์Šคํ…œ์„ ๋‹ค๋ฃจ๋Š” ๊ฒƒ์„ ์ „์ œ๋กœ ํ•˜๋ฏ€๋กœ ์ธ์ž๋กœ '์…€ ๋ช…๋ น์–ด'๋ฅผ ์ง€์ •ํ•œ๋‹ค. + +์ฆ‰ `-d` ์—†์ด `-it` ์˜ต์…˜๋งŒ ์‚ฌ์šฉํ•œ๋‹ค. ์ธ์ž๋กœ๋Š” `/bin/bash` ๋“ฑ ์…€ ๋ช…๋ น์–ด๋ฅผ ์ง€์ •ํ•œ๋‹ค. + +| ์ด๋ฏธ์ง€ ์ด๋ฆ„ | ์ปจํ…Œ์ด๋„ˆ์˜ ๋‚ด์šฉ | +| :---------: | :-------------: | +| ubuntu | ์šฐ๋ถ„ํˆฌ | +| centos | CentOS | +| edbian | ๋ฐ๋น„์•ˆ | +| fedora | ํŽ˜๋„๋ผ | +| busybox | BizyBox | +| alpine | ์•ŒํŒŒ์ธ ๋ฆฌ๋ˆ…์Šค | + +## ์›น ์„œ๋ฒ„/๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์„œ๋ฒ„์šฉ ์ปจํ…Œ์ด๋„ˆ + +์›น ์„œ๋ฒ„๋Š” ํ†ต์‹ ์ด ์ „์ œ๊ฐ€ ๋˜๋ฏ€๋กœ ์˜ต์…˜์„ ํ†ตํ•ด ํฌํŠธ ๋ฒˆํ˜ธ๋ฅผ ์ง€์ •ํ•ด์•ผ ํ•œ๋‹ค. + +๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ๊ด€๋ฆฌ ์†Œํ”„ํŠธ์›จ์–ด๋Š” ๊ธฐ๋ณธ์ ์œผ๋กœ **๋ฃจํŠธ ํŒจ์Šค์›Œ๋“œ**๋ฅผ ๋ฐ˜๋“œ์‹œ ์ง€์ •ํ•ด์•ผ ํ•œ๋‹ค. + +| ์ด๋ฏธ์ง€ ์ด๋ฆ„ | ์ปจํ…Œ์ด๋„ˆ์˜ ๋‚ด์šฉ | ์ปจํ…Œ์ด๋„ˆ ์‹คํ–‰์— ์ฃผ๋กœ ์‚ฌ์šฉ๋˜๋Š” ์˜ต์…˜ ๋ฐ ์ธ์ž| +| :---------: | :-------------: |:---:| +| httpd | ์šฐ๋ถ„ํˆฌ |`-d` `-p`| +| nginx | CentOS |`-d` `-p`| +| mysql | ๋ฐ๋น„์•ˆ |`-d` `-e MYSQL_ROOT_PASSWORD`| +| postgres | Postgre |`-d` `-e POSTGRES_ROOT_PASSWORD`| +| mariadb | MariaDB |`-d` `-e MYSQL_ROOT_PASSWORD`| + +## ํ”„๋กœ๊ทธ๋žจ ์‹คํ–‰์„ ์œ„ํ•œ ๋Ÿฐํƒ€์ž„๊ณผ ๊ทธ ์™ธ ์†Œํ”„ํŠธ์›จ์–ด + +ํ”„๋กœ๊ทธ๋žจ์„ ์‹คํ–‰ํ•˜๋ ค๋ฉด ํ•ด๋‹น ์–ธ์–ด์˜ ์‹คํ–‰ ํ™˜๊ฒฝ์ธ **๋Ÿฐํƒ€์ž„**์ด ํ•„์š”ํ•˜๋‹ค. ์ด ๋˜ํ•œ ์ปจํ…Œ์ด๋„ˆ ํ˜•ํƒœ๋กœ ์ œ๊ณต๋œ๋‹ค. + +| ์ด๋ฏธ์ง€ ์ด๋ฆ„ | ์ปจํ…Œ์ด๋„ˆ์˜ ๋‚ด์šฉ | ์ปจํ…Œ์ด๋„ˆ ์‹คํ–‰์— ์ฃผ๋กœ ์‚ฌ์šฉ๋˜๋Š” ์˜ต์…˜ ๋ฐ ์ธ์ž| +| :---------: | :-------------: |:---:| +| openjdk | java ๋Ÿฐํƒ€์ž„ |`-d`๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ์ธ์ž๋กœ java ๋ช…๋ น ๋“ฑ์„ ์ง€์ •ํ•ด ๋„๊ตฌ ํ˜•ํƒœ๋กœ ์‚ฌ์šฉํ•œ๋‹ค.| +| python | python ๋Ÿฐํƒ€์ž„ |`-d`๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ์ธ์ž๋กœ python ๋ช…๋ น ๋“ฑ์„ ์ง€์ •ํ•ด ๋„๊ตฌ ํ˜•ํƒœ๋กœ ์‚ฌ์šฉํ•œ๋‹ค.| +| php | PHP ๋Ÿฐํƒ€์ž„ |์›น ์„œ๋ฒ„๊ฐ€ ํฌํ•จ๋œ ๊ฒƒ๊ณผ ์‹คํ–‰ ๋ช…๋ น๋งŒ ํฌํ•จ๋œ ๊ฒƒ์œผ๋กœ ๋‚˜์œ„์–ด ์ œ๊ณต๋œ๋‹ค.| +| ruby | ruby ๋Ÿฐํƒ€์ž„ |์›น ์„œ๋ฒ„๊ฐ€ ํฌํ•จ๋œ ๊ฒƒ๊ณผ ์‹คํ–‰ ๋ช…๋ น๋งŒ ํฌํ•จ๋œ ๊ฒƒ์œผ๋กœ ๋‚˜์œ„์–ด ์ œ๊ณต๋œ๋‹ค.| +| perl | perl ๋Ÿฐํƒ€์ž„ |`-d`๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ์ธ์ž๋กœ perl ๋ช…๋ น ๋“ฑ์„ ์ง€์ •ํ•ด ๋„๊ตฌ ํ˜•ํƒœ๋กœ ์‚ฌ์šฉํ•œ๋‹ค| +| gcc | C/C++ ์ปดํŒŒ์ผ๋Ÿฌ |`-d`๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ์ธ์ž๋กœ gcc ๋ช…๋ น ๋“ฑ์„ ์ง€์ •ํ•ด ๋„๊ตฌ ํ˜•ํƒœ๋กœ ์‚ฌ์šฉํ•œ๋‹ค| +| node | Node.js |`-d`๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  ์ธ์ž๋กœ app ๋ช…๋ น ๋“ฑ์„ ์ง€์ •ํ•ด ๋„๊ตฌ ํ˜•ํƒœ๋กœ ์‚ฌ์šฉํ•œ๋‹ค| +| registry | ๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ |`-d`์˜ต์…˜์„ ์‚ฌ์šฉํ•ด ๋ฐฑ๊ทธ๋ผ์šด๋“œ๋กœ ์‹คํ–‰ํ•œ๋‹ค. `-p` ์˜ต์…˜์œผ๋กœ ํฌํŠธ ๋ฒˆํ˜ธ๋ฅผ ์ง€์ •ํ•œ๋‹ค.| +| wordpress | WordPress |`-d`์˜ต์…˜์„ ์‚ฌ์šฉํ•ด ๋ฐฑ๊ทธ๋ผ์šด๋“œ๋กœ ์‹คํ–‰ํ•œ๋‹ค. `-p` ์˜ต์…˜์œผ๋กœ ํฌํŠธ ๋ฒˆํ˜ธ๋ฅผ ์ง€์ •ํ•œ๋‹ค. 
MySQL ๋˜๋Š” MariaDB๊ฐ€ ํ•„์š”ํ•˜๋‹ค. ์ ‘์†์— ํ•„์š”ํ•œ ํŒจ์Šค์›Œ๋“œ๋Š” `-e` ์˜ต์…˜์œผ๋กœ ์ง€์ •ํ•œ๋‹ค.| +| nextcloud | NextCloud |`-d`์˜ต์…˜์„ ์‚ฌ์šฉํ•ด ๋ฐฑ๊ทธ๋ผ์šด๋“œ๋กœ ์‹คํ–‰ํ•œ๋‹ค. `-p` ์˜ต์…˜์œผ๋กœ ํฌํŠธ ๋ฒˆํ˜ธ๋ฅผ ์ง€์ •ํ•œ๋‹ค.| +| redmine | Redmine |`-d`์˜ต์…˜์„ ์‚ฌ์šฉํ•ด ๋ฐฑ๊ทธ๋ผ์šด๋“œ๋กœ ์‹คํ–‰ํ•œ๋‹ค. `-p` ์˜ต์…˜์œผ๋กœ ํฌํŠธ ๋ฒˆํ˜ธ๋ฅผ ์ง€์ •ํ•œ๋‹ค. PostgreSQL ๋˜๋Š” MySQL์ด ํ•„์š”ํ•˜๋‹ค| + +## [์‹ค์Šต] ์•„ํŒŒ์น˜ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์—ฌ๋Ÿฌ ๊ฐœ ์‹คํ–‰ํ•˜๊ธฐ + +```bash +# ์—ฌ๋Ÿฌ ๊ฐœ์˜ ์•„ํŒŒ์น˜ container ์‹คํ–‰ +PS C:\Users\Kwon> docker run --name apa000ex3 -d -p 8081:80 httpd +e21699803316c50412fefe15c8083f002b13f6a77a3fe44425f64df804622b0e +PS C:\Users\Kwon> docker run --name apa000ex4 -d -p 8082:80 httpd +fcfaac356cc40277c0855211b0394593db58bbae4c5e70bfb4cf60f10a47fdcb +PS C:\Users\Kwon> docker run --name apa000ex5 -d -p 8083:80 httpd +d656af9f2f9bc5c7326efe9c765888586c3564022b13ab14cd8348c8187c5460 + +PS C:\Users\Kwon> docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +d656af9f2f9b httpd "httpd-foreground" 10 seconds ago Up 10 seconds 0.0.0.0:8083->80/tcp apa000ex5 +fcfaac356cc4 httpd "httpd-foreground" 16 seconds ago Up 16 seconds 0.0.0.0:8082->80/tcp apa000ex4 +e21699803316 httpd "httpd-foreground" 22 seconds ago Up 22 seconds 0.0.0.0:8081->80/tcp apa000ex3 + +# container ์ค‘์ง€ +PS C:\Users\Kwon> docker stop apa000ex3 +apa000ex3 +PS C:\Users\Kwon> docker stop apa000ex4 +apa000ex4 +PS C:\Users\Kwon> docker stop apa000ex5 +apa000ex5 + +PS C:\Users\Kwon> docker ps -a +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +d656af9f2f9b httpd "httpd-foreground" 56 seconds ago Exited (0) 10 seconds ago apa000ex5 +fcfaac356cc4 httpd "httpd-foreground" About a minute ago Exited (0) 13 seconds ago apa000ex4 +e21699803316 httpd "httpd-foreground" About a minute ago Exited (0) 17 seconds ago apa000ex3 + +# cooontainer ์‚ญ์ œ +PS C:\Users\Kwon> docker rm apa000ex3 +apa000ex3 +PS C:\Users\Kwon> docker rm apa000ex4 +apa000ex4 +PS C:\Users\Kwon> docker rm apa000ex5 +apa000ex5 + +PS C:\Users\Kwon> docker ps -a +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +``` + +## [์‹ค์Šต] Nginx ์ปจํ…Œ์ด๋„ˆ ์‹คํ–‰ + +```bash +# Nginx container ์‹คํ–‰ +PS C:\Users\Kwon> docker run --name nginx000ex6 -d -p 8084:80 nginx +Unable to find image 'nginx:latest' locally +latest: Pulling from library/nginx +171eebbdf235: Download complete +9ad567d3b8a2: Download complete +9b1039c85176: Download complete +773c63cd62e4: Download complete +4b0adc47c460: Download complete +1d2712910bdf: Download complete +Digest: sha256:bc5eac5eafc581aeda3008b4b1f07ebba230de2f27d47767129a6a905c84f470 +Status: Downloaded newer image for nginx:latest +eb1e2ba350487505a13e5eda7c72690ca7e999d97d811635d5045c0495e6223f + +PS C:\Users\Kwon> docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eb1e2ba35048 nginx "/docker-entrypoint.โ€ฆ" 11 seconds ago Up 9 seconds 0.0.0.0:8084->80/tcp nginx000ex6 + +# container ์ค‘์ง€/์‚ญ์ œ +PS C:\Users\Kwon> docker stop nginx000ex6 +nginx000ex6 + +PS C:\Users\Kwon> docker ps -a +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eb1e2ba35048 nginx "/docker-entrypoint.โ€ฆ" 33 seconds ago Exited (0) 5 seconds ago nginx000ex6 + +PS C:\Users\Kwon> docker rm nginx000ex6 +nginx000ex6 +``` + +## [์‹ค์Šต] MySQL ์ปจํ…Œ์ด๋„ˆ ์‹คํ–‰ + +```bash +# MySQL container ์‹คํ–‰ +PS C:\Users\Kwon> docker run --name mysql000ex7 -dit -e MYSQL_ROOT_PASSWORD=1234 mysql +Unable to find image 'mysql:latest' locally +latest: Pulling from library/mysql +7030c241d9b8: Pulling fs layer 
+f1a9f94fc2db: Download complete +c0fb96d14e5b: Download complete +f98254a2b688: Download complete +5f31e56c9bea: Download complete +d57074c62694: Download complete +6ad83e89f981: Download complete +a42d733ea779: Download complete +6fd1af2601dd: Download complete +0233a63dc5cd: Download complete +Digest: sha256:2be51594eba5983f47e67ff5cb87d666a223e309c6c64450f30b5c59a788ea40 +Status: Downloaded newer image for mysql:latest +58f387c6520598518264e9f0a70ac6e6d9830bf381157aa96063f9eccfc867e6 + +PS C:\Users\Kwon> docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +58f387c65205 mysql "docker-entrypoint.sโ€ฆ" 20 minutes ago Up 20 minutes 3306/tcp, 33060/tcp mysql000ex7 + +# container ์ค‘์ง€/์‚ญ์ œ +PS C:\Users\Kwon> docker stop mysql000ex7 +mysql000ex7 +PS C:\Users\Kwon> docker rm mysql000ex7 +mysql000ex7 +``` + +# SECTION 6. ์ด๋ฏธ์ง€ ์‚ญ์ œ + +container๋ฅผ ์—ฌ๋Ÿฌ ๋ฒˆ ๋งŒ๋“ค๋‹ค ๋ณด๋ฉด image๋Š” ๊ทธ๋Œ€๋กœ ๋‚จ์•„์žˆ๋Š” ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•œ๋‹ค. +ํ•ด๋‹น ์ด๋ฏธ์ง€๋กœ ์‹คํ–‰ํ•œ container๊ฐ€ ๋‚จ์•„ ์žˆ์œผ๋ฉด image๋ฅผ ์‚ญ์ œํ•  ์ˆ˜ ์—†์œผ๋ฏ€๋กœ ์‚ฌ์ „์— container๋ฅผ ์ค‘์ง€ ๋ฐ ์‚ญ์ œํ•œ๋‹ค. + +## `docker image rm` + +์—ฌ๋Ÿฌ ๊ฐœ๋ฅผ ํ•œ ๋ฒˆ์— ์‚ญ์ œํ•  ์ˆ˜๋„ ์žˆ๋‹ค. + +```bash +docker image rm image1 image2 image3 +``` + +## `docker image ls` + +image๋ฅผ ์‚ญ์ œํ•˜๋ ค๋ฉด imgae ์ด๋ฆ„ ๋˜๋Š” id๋ฅผ ์•Œ์•„์•ผ ํ•œ๋‹ค. + +coantainer ๋ชฉ๋ก์„ ๋ถˆ๋Ÿฌ์˜ค๋Š” `docker ps`์™€ ๊ฐ™์ด `docker container ls`๋กœ image ๋ชฉ๋ก์„ ๋ถˆ๋Ÿฌ์˜ฌ ์ˆ˜ ์žˆ๋‹ค. +์ถ•์•ฝํ˜•์€ `docker ls`์ด๋‹ค. + +```sh +PS C:\Users\Kwon> docker image ls +REPOSITORY TAG IMAGE ID CREATED SIZE +mysql latest 2be51594eba5 4 weeks ago 825MB +nginx latest bc5eac5eafc5 6 weeks ago 279MB +httpd latest 6bdbdf5ac16a 4 months ago 221MB +``` + +### image version + +`image_name:version`์œผ๋กœ ์ด๋ฏธ์ง€์˜ ๋ฒ„์ „์„ ์ง€์ •ํ•  ์ˆ˜ ์žˆ๋‹ค. + +```sh +# ์•„ํŒŒ์น˜ 2.2 ๋ฒ„์ „์„ ์ง€์ •ํ•ด ์‹คํ–‰ +docker run --name apa000ex2 -d -p 8080:80 httpd:2.2 +``` + +## [์‹ค์Šต] ์ด๋ฏธ์ง€ ์‚ญ์ œ + +```sh +PS C:\Users\Kwon> docker image ls +REPOSITORY TAG IMAGE ID CREATED SIZE +mysql latest 2be51594eba5 4 weeks ago 825MB +nginx latest bc5eac5eafc5 6 weeks ago 279MB +httpd latest 6bdbdf5ac16a 4 months ago 221MB + +# ์ถ•์•ฝํ˜• +PS C:\Users\Kwon> docker rmi httpd +Untagged: httpd:latest +Deleted: sha256:6bdbdf5ac16ac3d6ef543a693fd5dfafae2428b4b0cdc52a480166603a069136 + +PS C:\Users\Kwon> docker image ls +REPOSITORY TAG IMAGE ID CREATED SIZE +mysql latest 2be51594eba5 4 weeks ago 825MB +nginx latest bc5eac5eafc5 6 weeks ago 279MB + +# image rm์œผ๋กœ ์‚ญ์ œ +PS C:\Users\Kwon> docker image rm nginx mysql +Untagged: nginx:latest +Deleted: sha256:bc5eac5eafc581aeda3008b4b1f07ebba230de2f27d47767129a6a905c84f470 +Untagged: mysql:latest +Deleted: sha256:2be51594eba5983f47e67ff5cb87d666a223e309c6c64450f30b5c59a788ea40 + +PS C:\Users\Kwon> docker image ls +REPOSITORY TAG IMAGE ID CREATED SIZE +``` \ No newline at end of file diff --git a/_posts/2024-12-09-dockerCH5.markdown b/_posts/2024-12-09-dockerCH5.markdown new file mode 100644 index 00000000000..899e6ec8c02 --- /dev/null +++ b/_posts/2024-12-09-dockerCH5.markdown @@ -0,0 +1,158 @@ +--- +title: "CH5. ์—ฌ๋Ÿฌ ๊ฐœ์˜ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์—ฐ๋™ํ•ด ์‹คํ–‰ํ•ด๋ณด์ž [๊ทธ๋ฆผ์œผ๋กœ ๋ฐฐ์šฐ๋Š” ๋„์ปค & ์ฟ ๋ฒ„๋„คํ‹ฐ์Šค]" +author: kwon +date: 2024-12-09T14:00:00 +0900 +categories: [๋„์ปค] +tags: [docker] +math: true +mermaid: false +--- + +# SECTION 1. 
์›Œ๋“œํ”„๋ ˆ์Šค ๊ตฌ์ถ• + +## ์›Œ๋“œํ”„๋ ˆ์Šค ๊ตฌ์ถ• + +์›Œ๋“œํ”„๋ ˆ์Šค๋Š” ์›น ์‚ฌ์ดํŠธ๋ฅผ ๋งŒ๋“ค๊ธฐ ์œ„ํ•œ ์†Œํ”„ํŠธ์›จ์–ด๋กœ, ์•„ํŒŒ์น˜๋‚˜ ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค, PHP ๋Ÿฐํƒ€์ž„ ๋“ฑ์„ ํ•„์š”๋กœ ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๊ตฌ์ถ•์„ ์œ„ํ•œ ์—ฐ์Šต ์†Œ์žฌ๋กœ ์ข‹๋‹ค. + +์›Œ๋“œํ”„๋ ˆ์Šค๋Š” ์›Œ๋“œํ”„๋ ˆ์Šค ์ปจํ…Œ์ด๋„ˆ์™€ MySQL ์ปจํ…Œ์ด๋„ˆ๋กœ ๊ตฌ์„ฑ๋œ๋‹ค. +์›Œ๋“œํ”„๋ ˆ์Šค๋Š” ๋ธ”๋กœ๊ทธ ์ƒ์„ฑ ๋„๊ตฌ์™€ ๊ฐ™์€ ๊ฒƒ์ด๋ฏ€๋กœ, ํ”„๋กœ๊ทธ๋žจ์ด MySQL์— ์ €์žฅ๋œ ๋ฐ์ดํ„ฐ๋ฅผ ์ฝ๊ณ  ์“ธ ์ˆ˜ ์žˆ์–ด์•ผ ํ•œ๋‹ค. +๋•Œ๋ฌธ์— ๋‘ ์ปจํ…Œ์ด๋„ˆ๊ฐ€ ์—ฐ๊ฒฐ๋ผ ์žˆ์–ด์•ผ ํ•œ๋‹ค. + +๋‘ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์—ฐ๊ฒฐํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” ๊ฐ€์ƒ ๋„คํŠธ์›Œํฌ๋ฅผ ๋งŒ๋“ค์–ด ๋‘ ๊ฐœ์˜ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์†Œ์†์‹œ์ผœ ์—ฐ๊ฒฐํ•œ๋‹ค. +๋„คํŠธ์›Œํฌ์— ๋Œ€ํ•œ ๊ธฐ๋ณธ ๋ช…๋ น์–ด๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. +```sh +# ์ƒ์„ฑ +docker network create +# ์‚ญ์ œ +docker network rm +# ์กฐํšŒ +docker network ls +# ์ ‘์† +docker network connect +# ์ ‘์† ๋Š๊ธฐ +docker network disconnect +# ์ƒ์„ธ ์ •๋ณด +docker network isnpect +# ์•„๋ฌด๋„ ์ ‘์†ํ•˜์ง€ ์•Š์€ ๋„คํŠธ์›Œํฌ ์‚ญ์ œ +docker network prune +``` + +### MySQL ์‹คํ–‰ ์‹œ ํ•„์š”ํ•œ ์˜ต์…˜๊ณผ ์ธ์ž + +```sh +docker run --name -dit --net= -e MYSQL_ROOT_PASSWORD= MYSQL_DATABASE= -e MYSQL_USER= -e MYSQL_PASSWORD= mysql --charactoer-set-server=<๋ฌธ์ž์ธ์ฝ”๋”ฉ> --collation-server=<์ •๋ ฌ์ˆœ์„œ> --default-authentication-plugin=<์ธ์ฆ๋ฐฉ์‹> +``` + +๋ญ”๊ฐ€ ์—„์ฒญ ๋งŽ์•„ ๋ณด์ด์ง€๋งŒ ๋ชจ๋‘ ๊ฑฐ์˜ ํ™˜๊ฒฝ๋ณ€์ˆ˜ ์„ค์ •์ด๋‹ค. +#### ์˜ต์…˜ + +| ํ•ญ๋ชฉ | ์˜ต์…˜ | +| :----------------------- | :----------------------- | +| ๋„คํŠธ์›Œํฌ ์ด๋ฆ„ | `--net` | +| ์ปจํ…Œ์ด๋„ˆ ์ด๋ฆ„ | `--name` | +| MySQL ๋ฃจํŠธ ํŒจ์Šค์›Œ๋“œ | `-e MYSQL_ROOT_PASSWORD` | +| MySQL ๋ฐ์ดํ„ฐ ๋ฒ ์ด์Šค ์ด๋ฆ„ | `-e MYSQL_DATABASE` | +| MySQL ์‚ฌ์šฉ์ž ์ด๋ฆ„ | `-e MYSQL_USER` | +| MySQL ํŒจ์Šค์›Œ๋“œ | `-e MYSQL_PASSWORD` | + +#### ์ธ์ž + +| ํ•ญ๋ชฉ | ๊ฐ’ | +| :--------- | :------------------------------- | +| ๋ฌธ์ž ์ธ์ฝ”๋”ฉ | `--charactoer-set-server` | +| ์ •๋ ฌ ์ˆœ์„œ | `--collation-server` | +| ์ธ์ฆ ๋ฐฉ์‹ | `--default-authentication-plugin` | + + +### ์›Œ๋“œํ”„๋ ˆ์Šค ์ปจํ…Œ์ด๋„ˆ ์‹คํ–‰ ์‹œ ํ•„์š”ํ•œ ์˜ต์…˜๊ณผ ์ธ์ž + +```sh +docker run --name -dit --net= -p -e ... wordpress +``` + +#### ์˜ต์…˜ + +์—ฌ๊ธฐ๋„ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ์˜ต์…˜์ด ๋งŽ๋‹ค. + +| ํ•ญ๋ชฉ | ๊ฐ’ | +| :-------------------------- | :-------------------------- | +| ๋ฐ์ดํ„ฐ ๋ฒ ์ด์Šค ์ปจํ…Œ์ด๋„ˆ ์ด๋ฆ„ | `-e WORDPRESS_DB_HOST` | +| ๋ฐ์ดํ„ฐ ๋ฒ ์ด์Šค ์ด๋ฆ„ | `-e WORDPRESS_DB_NAME` | +| ๋ฐ์ดํ„ฐ ๋ฒ ์ด์Šค ์‚ฌ์šฉ์ž ์ด๋ฆ„ | `-e WORDPRESS_DB_USER` | +| ๋ฐ์ดํ„ฐ ๋ฒ ์ด์Šค ๋น„๋ฐ€๋ฒˆํ˜ธ | `-e WORDPRESS_DB_PASSSWORD` | + +--- +# SECTION 2. 
์›Œ๋“œํ”„๋ ˆ์Šค ๋ฐ MySQL ์ปจํ…Œ์ด๋„ˆ ์ƒ์„ฑ๊ณผ ์—ฐ๋™ + +```sh +C:\Users\Kwon>docker run --name mysql000ex11 -dit --net=wp -e MYSQL_ROOT_PASSWORD=1234 -e MYSQL_DATABASE=wordpress---db -e MYSQL_USER=wordpress000user -e MYSQL_PASSWORD=1234 mysql --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci --default-authentication-plugin=mysql_native_password +853dd040ade1fef5532d8429504d7a1b2ab3f0b3cb6f8ed635918d4df773b1b6 + +C:\Users\Kwon>docker run --name wordpress000ex12 -dit --net=wp -p 8085:80 -e WORDPRESSS_DB_HOST=mysql000ex11 -e WORDPRES +S_DB_NAME=wordpress000db -e WORDPRESS_DB_USER=wordpress000user -e WORDPRESS_DB_PASSWORD=1234 wordpress +aef41f135fbdba00fe389a0f882e52aaa05f24f638fbf7ce3277dfc964ea5e79 + +C:\Users\Kwon>docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +aef41f135fbd wordpress "docker-entrypoint.sโ€ฆ" 24 seconds ago Up 21 seconds 0.0.0.0:8085->80/tcp wordpress000ex12 +``` + +์ฑ…๊ณผ ๋˜‘๊ฐ™์ด ์ž‘์„ฑํ–ˆ๋Š”๋ฐ๋„ mysql์ด ์‹คํ–‰์ด ์ •์ƒ์ ์œผ๋กœ ๋˜์ง€ ์•Š์•„ log๋ฅผ ํ™•์ธํ•ด๋ดค๋‹ค. + +```sh +C:\Users\Kwon>docker logs mysql000ex11 +2024-12-09 06:59:03+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 9.1.0-1.el9 started. +2024-12-09 06:59:03+00:00 [Note] [Entrypoint]: Switching to dedicated user 'mysql' +2024-12-09 06:59:03+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 9.1.0-1.el9 started. +2024-12-09 06:59:03+00:00 [Note] [Entrypoint]: Initializing database files +2024-12-09T06:59:03.712828Z 0 [System] [MY-015017] [Server] MySQL Server Initialization - start. +2024-12-09T06:59:03.715059Z 0 [System] [MY-013169] [Server] /usr/sbin/mysqld (mysqld 9.1.0) initializing of server in progress as process 80 +2024-12-09T06:59:03.733272Z 1 [System] [MY-013576] [InnoDB] InnoDB initialization has started. +2024-12-09T06:59:04.187540Z 1 [System] [MY-013577] [InnoDB] InnoDB initialization has ended. +2024-12-09T06:59:05.435282Z 0 [ERROR] [MY-000067] [Server] unknown variable 'default-authentication-plugin=mysql_native_password'. +2024-12-09T06:59:05.435689Z 0 [ERROR] [MY-013236] [Server] The designated data directory /var/lib/mysql/ is unusable. You can remove all files that the server added to it. +2024-12-09T06:59:05.435725Z 0 [ERROR] [MY-010119] [Server] Aborting +2024-12-09T06:59:06.816914Z 0 [System] [MY-015018] [Server] MySQL Server Initialization - end. +``` + +~~`default-authentication-plugin`์ด ์—†๋‹จ๋‹ค. ์—ฌ๊ธฐ์ €๊ธฐ ์ฐพ์•„๋ดค์ง€๋งŒ ์•„์ง ๋‹ต์„ ์ฐพ์ง€ ๋ชปํ•ด ์ผ๋‹จ ์ œ๊ฑฐํ•˜๊ณ  ์ง„ํ–‰ํ•œ๋‹ค.~~ + +```sh +C:\Users\Kwon>docker run --name mysql000ex11 -dit --net=wp -e MYSQL_ROOT_PASSWORD=1234 -e MYSQL_DATABASE=wordpress---db +-e MYSQL_USER=wordpress000user -e MYSQL_PASSWORD=1234 mysql:8 --character-set-server=utf8mb4 --collation-server=utf8mb4_ +unicode_ci --default-authentication-plugin=mysql_native_password +686dada7cdad34fca5dbf41bd22e4f870a449293a9bccdcaf56f883f22ce2775 + +C:\Users\Kwon>docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +686dada7cdad mysql:8 "docker-entrypoint.sโ€ฆ" 8 seconds ago Up 5 seconds 3306/tcp, 33060/tcp mysql000ex11 +``` +์ฑ…๊ณผ ๋ฒ„์ „์„ ๋งž์ถฐ์ฃผ๋‹ˆ ๋œ๋‹ค. mysql 8 ๋ฒ„์ „์œผ๋กœ ์ง„ํ–‰ํ•ด์•ผ ํ•œ๋‹ค. 
+ +```sh +C:\Users\Kwon>docker run --name mysql000ex11 -dit --net=wp -e MYSQL_ROOT_PASSWORD=1234 -e MYSQL_DATABASE=wordpress---db -e MYSQL_USER=wordpress000user -e MYSQL_PASSWORD=1234 mysql --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci +bd36ad18cacf2d5cb868c6007b3b73272477a5dc39e9b52e374fdd84cb1cd51b + +C:\Users\Kwon>docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +bd36ad18cacf mysql "docker-entrypoint.sโ€ฆ" 11 seconds ago Up 11 seconds 3306/tcp, 33060/tcp mysql000ex11 +aef41f135fbd wordpress "docker-entrypoint.sโ€ฆ" 41 minutes ago Up 41 minutes 0.0.0.0:8085->80/tcp wordpress000ex12 +``` + +์ •์ƒ์ ์œผ๋กœ ์‹คํ–‰๋˜๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. +--- + +# SECTION 3. ๋ช…๋ น์–ด๋ฅผ ์ง์ ‘ ์ž‘์„ฑํ•˜์ž + +## SW์™€ DB์˜ ๊ด€๊ณ„ + +์›Œ๋“œํ”„๋ ˆ์Šค๋ฅผ ์‚ฌ์šฉํ•˜๋ ค๋ฉด ์›Œ๋“œํ”„๋ ˆ์Šค ํ”„๋กœ๊ทธ๋žจ ์™ธ์—๋„ ์•„ํŒŒ์น˜์™€ PHP ๋Ÿฐํƒ€์ž„, MySQL์ด ํ•„์š”ํ–ˆ๋‹ค. +์›Œ๋“œํ”„์—์Šค ์™ธ์—๋„ ์ด๋Ÿฌํ•œ ํ˜•์‹์œผ๋กœ ๊ตฌ์„ฑ๋˜๋Š” ์›น ์‹œ์Šคํ…œ์ด ๋งŽ๋‹ค. + +ํŠนํžˆ ์•„ํŒŒ์น˜, PHP, MySQL์— ๋ฆฌ๋ˆ…์Šค๋ฅผ ํ•ฉ์นœ ์กฐํ•ฉ์€ **LAMP ์Šคํƒ**์ด๋ผ๊ณ  ๋ถ€๋ฅธ๋‹ค. +SW๊ฐ€ ๋ฐœ์ „ํ•˜๋ฉด์„œ ์•„ํŒŒ์น˜๊ฐ€ nginx๋กœ ๋ฐ”๋€Œ๊ธฐ๋„ ํ•˜๊ณ , MySQL์ด MariaDB๋‚˜ PosetgreSQL๋กœ ๋ฐ”๋€ ์กฐํ•ฉ๋„ ๋‚˜ํƒ€๋‚ฌ์ง€๋งŒ +'**๋ฆฌ๋ˆ…์Šค + ์›น์„œ๋ฒ„ + ํ”„๋กœ๊ทธ๋ž˜๋ฐ ์–ธ์–ด ๋Ÿฐํƒ€์ž„ + ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค**'์˜ ์กฐํ•ฉ์ž„์€ ๋ณ€ํ•จ์ด ์—†๋‹ค. + +๋”ฐ๋ผ์„œ ์ปจํ…Œ์ด๋„ˆ๋„ 'ํ”„๋กœ๊ทธ๋žจ ๋ณธ์ฒด + ํ”„๋กœ๊ทธ๋žจ ๋Ÿฐํƒ€์ž„ + ์›น ์„œ๋ฒ„' ์ปจํ…Œ์ด๋„ˆ์™€ '๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค' ์ปจํ…Œ์ด๋„ˆ๋กœ ๊ตฌ์„ฑํ•ด ์šด๋ นํ•˜๋Š” ์‚ฌ๋ก€๋ฅผ ํ”ํžˆ ๋ณผ ์ˆ˜ ์žˆ๋‹ค. + +๊ธ€์ด ๊ธธ์–ด์ง์— ๋”ฐ๋ผ ํฌ์ŠคํŒ…์—์„œ๋Š” ์‹ค์Šต์„ ์ œ์™ธํ•˜๋„๋ก ํ•œ๋‹ค. ์›Œ๋“œํ”„๋ ˆ์Šค - MySQL๊ณผ ๊ฑฐ์˜ ๋น„์Šทํ•œ ๊ณผ์ •์œผ๋กœ ์ง„ํ–‰๋œ๋‹ค. \ No newline at end of file diff --git a/_posts/2024-12-12-dockerCH6.markdown b/_posts/2024-12-12-dockerCH6.markdown new file mode 100644 index 00000000000..4dd37bc60e4 --- /dev/null +++ b/_posts/2024-12-12-dockerCH6.markdown @@ -0,0 +1,272 @@ +--- +title: "CH6. ์ปจํ…Œ์ด๋„ˆ์™€ ํ˜ธ์ŠคํŠธ ๊ฐ„์— ํŒŒ์ผ ๋ณต์‚ฌํ•˜๊ธฐ [๊ทธ๋ฆผ์œผ๋กœ ๋ฐฐ์šฐ๋Š” ๋„์ปค & ์ฟ ๋ฒ„๋„คํ‹ฐ์Šค]" +author: kwon +date: 2024-12-12T14:00:00 +0900 +categories: [๋„์ปค] +tags: [docker] +math: true +mermaid: false +--- + +# SECTION 2. ์ปจํ…Œ์ด๋„ˆ์™€ ํ˜ธ์ŠคํŠธ ๊ฐ„์— ํŒŒ์ผ ๋ณต์‚ฌํ•˜๊ธฐ + +## ํŒŒ์ผ ๋ณต์‚ฌ + +> ํ”„๋กœ๊ทธ๋žจ๋งŒ์œผ๋กœ ๊ตฌ์„ฑ๋œ ์‹œ์Šคํ…œ์€ ๊ทธ๋ฆฌ ๋งŽ์ง€ ์•Š๋‹ค. + +5์žฅ์—์„œ๋„ ๋‚˜์™”๋“ฏ์ด ํ”„๋กœ๊ทธ๋žจ ์™ธ์—๋„ ํ”„๋กœ๊ทธ๋ž˜๋ฐ ์–ธ์–ด์˜ ๋Ÿฐํƒ€์ž„์ด๋‚˜ ์›น ์„œ๋ฒ„, ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ๋“ฑ์ด ํ•จ๊ป˜ ์‹œ์Šคํ…œ์„ ๊ตฌ์„ฑํ•œ๋‹ค. + +์ด๋“ค์€ ์‹œ์Šคํ…œ์ด ๋™์ž‘ํ•˜๋Š” ๋ฐ ํ•„์š”ํ•˜์ง€๋งŒ, ๊ทธ ์™ธ์—๋„ ํ™”๋ฉด์„ ๊ตฌ์„ฑํ•˜๋Š” ์ด๋ฏธ์ง€, ์ž…๋ ฅ๋ฐ›์€๋ฐ์ดํ„ฐ ๋ณธ์ฒด ๋“ฑ์ด ์žˆ์„ ์ˆ˜ ์žˆ๋‹ค. +์ด๋Ÿฌํ•œ ํŒŒ์ผ์€ ์„œ๋ฒ„์— ์ €์žฅ๋˜๊ธฐ๋„ ํ•˜์ง€๋งŒ ๋•Œ๋กœ๋Š” SW์˜ ๊ฐœ์ž… ์—†์ด ์„œ๋ฒ„์™€ ๋กœ์ปฌ ์ปดํ“จํ„ฐ ๊ฐ„์— ํŒŒ์ผ์„ ์ฃผ๊ณ ๋ฐฉ์•„์•ผ ํ•  ๋•Œ๊ฐ€ ์žˆ๋‹ค. +์ด๋Ÿด ๋•Œ๋ฅผ ์œ„ํ•ด ํŒŒ์ผ ๋ณต์‚ฌํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ์•Œ์•„๋‘ฌ์•ผ ํ•œ๋‹ค. + +### `docker cp(container cp)` + +```sh +docker cp ํ˜ธ์ŠคํŠธ_๊ฒฝ๋กœ ์ปจํ…Œ์ด๋„ˆ_์ด๋ฆ„:์ปจํ…Œ์ด๋„ˆ_๊ฒฝ๋กœ +docker cp ์ปจํ…Œ์ด๋„ˆ_์ด๋ฆ„:์ปจํ…Œ์ด๋„ˆ_๊ฒฝ๋กœ ํ˜ธ์ŠคํŠธ_๊ฒฝ๋กœ + +docker cp ์›๋ณธ_๊ฒฝ๋กœ ๋ณต์‚ฌํ• _๊ฒฝ๋กœ +``` + +### ์‹ค์Šต + +```html + + + +
์•ˆ๋…•ํ•˜์„ธ์š”!
+ + +``` + +```sh +C:\Users\Kwon>docker run --name apa000ex19 -d -p 8089:80 httpd +4c037b31d34ea7c0b500b3f84b53b67e04eb1d466ebb0be26dfa6c655b0df72b + +C:\Users\Kwon>docker cp C:\Users\Kwon\Desktop\repositories\SSAFY-Docker-Study\members\kwon\index.html apa000ex19:/usr/local/apache2/htdocs/ +Successfully copied 2.05kB to apa000ex19:/usr/local/apache2/htdocs/ +``` + +์œ„ html ํŒŒ์ผ์„ ๋ณต์‚ฌํ•ด์„œ localhost์— ์ ‘์†ํ•ด๋ณด๋ฉด ์•„๋ž˜์™€ ๊ฐ™์ด ํ™”๋ฉด์ด ๋‚˜์˜ค๋Š” ๊ฒƒ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ๋‹ค. + +![](/posting_imgs/ch6-1.png){: width="70%"} + +์ธ์ฝ”๋”ฉ์ด ๋ฌธ์ œ๊ฐ€ ์ƒ๊ฒผ๋Š”๋ฐ... ์ผ๋‹จ html์— ๋Œ€ํ•ด ๋‹ค๋ฃจ๋Š” ๊ฒƒ์ด ๋ชฉ์ ์ด ์•„๋‹ˆ๋ฏ€๋กœ ๋„˜์–ด๊ฐ€๋„๋ก ํ•˜์ž. + + +# SECTION 3. ๋ณผ๋ฅจ ๋งˆ์šดํŠธ + +> ๋ณผ๋ฅจ์ด๋ž€ **์Šคํ† ๋ฆฌ์ง€์˜ ํ•œ ์˜์—ญ์„ ๋ถ„ํ• ํ•œ ๊ฒƒ** + +๋ณผ๋ฅจ ๋งˆ์šดํŠธ๋ž€ ์ด๋Ÿฐ ๋ณผ๋ฅจ์„ ์—ฐ๊ฒฐํ•˜์—ฌ ์šด๋ น์ฒด์ œ ๋˜๋Š” ์†Œํ”„ํŠธ์›จ์–ด์˜ ๊ด€๋ฆฌํ•˜๊ฒŒ ๋‘๋Š” ์ผ์„ ๋งํ•œ๋‹ค. + +์ปจํ…Œ์ด๋„ˆ๋Š” ์“ฐ๊ณ  ๋ฒ„๋ฆฌ๋Š” ๊ฒƒ์ด๋ผ ์ด ๋‚ด๋ถ€์— ๋ฐ์ดํ„ฐ๊ฐ€ ์žˆ๋‹ค๋ฉด, ์–ธ์  ๊ฐ€๋Š” ์‚ญ์ œ๋˜๊ธฐ ๋งˆ๋ จ์ด๋‹ค. +๋ฐ์ดํ„ฐ๊ฐ€ ์ด๋ ‡๊ฒŒ ์‚ฌ๋ผ์ง€๋Š” ๊ฒƒ์„ ๋ง‰๊ธฐ ์œ„ํ•ด ๋งˆ์šดํŠธ๋ฅผ ํ•ด์„œ ์™ธ๋ถ€์— ๋ฐ์ดํ„ฐ๋ฅผ ์•ˆ์ „ํ•˜๊ฒŒ ๋ณด๊ด€ํ•œ๋‹ค. (Data persistency) + +### Data Persist +> "Data Persist"๋Š” ๋ฐ์ดํ„ฐ๋ฅผ "์˜๊ตฌ์ ์œผ๋กœ ์ €์žฅํ•˜๊ฑฐ๋‚˜ ์œ ์ง€"ํ•˜๋Š” ๊ฒƒ์„ ์˜๋ฏธํ•œ๋‹ค. +> +> ์ด๋Š” ์ฃผ๋กœ ํœ˜๋ฐœ์„ฑ ๋ฐ์ดํ„ฐ(์˜ˆ: ๋ฉ”๋ชจ๋ฆฌ์—๋งŒ ์กด์žฌํ•˜๋Š” ๋ฐ์ดํ„ฐ)์™€ ๋ฐ˜๋Œ€๋˜๋Š” ๊ฐœ๋…์œผ๋กœ ์˜๊ตฌ์ ์ธ ์ €์žฅ์†Œ(์˜ˆ:ํ•˜๋“œ์›จ์–ด)์— ๋ฐ์ดํ„ฐ๋ฅผ ์ €์žฅํ•˜์—ฌ ์‹œ์Šคํ…œ์ด ์žฌ์‹œ์ž‘ํ•˜๊ฑฐ๋‚˜ ์ „์› ์ข…๋ฃŒ ์ดํ›„์—๋„ ๋ฐ์ดํ„ฐ๊ฐ€ ์‚ฌ๋ผ์ง€์ง€ ์•Š๋Š” ๊ฒƒ์„ ๋œปํ•œ๋‹ค. + + +## Storage Mount + +๋„์ปค์—์„œ ์Šคํ† ๋ฆฌ์ง€ ๋งˆ์šดํŠธ๋Š” ๋‘ ๊ฐ€์ง€ ์ข…๋ฅ˜๊ฐ€ ์žˆ๋‹ค. + +### Volmune Mount + +๋ณผ๋ฅจ ๋งˆ์šดํŠธ๋Š” ๋„์ปค ์—”์ง„์ด ๊ด€๋ฆฌํ•˜๋Š” ์˜๋ ฅ ๋‚ด์— ๋งŒ๋“ค์–ด์ง„ ๋ณผ๋ฅจ์„ ์ปจํ…Œ์ด๋„ˆ์— ๋””์Šคํฌ ํ˜•ํƒœ๋กœ ๋งˆ์šดํŠธํ•œ๋‹ค. + +์ด๋ฆ„๋งŒ์œผ๋กœ ๊ด€๋ฆฌ๊ฐ€ ๊ฐ€๋Šฅํ•˜๋ฏ€๋กœ ๋‹ค๋ฃจ๊ธฐ ์‰ฝ์ง€๋งŒ ๋ณผ๋ฅจ์— ๋น„ํ•ด **์ง์ ‘ ์กฐ์ž‘ํ•˜๊ธฐ ์–ด๋ ค์šฐ๋ฏ€๋กœ** '์ž„์‹œ ๋ชฉ์ ์˜ ์‚ฌ์šฉ'์ด๋‚˜ +'์ž์ฃผ ์“ฐ์ง€๋Š” ์•Š์ง€๋งŒ ์ง€์šฐ๋ฉด ์•ˆ ๋˜๋Š” ํŒŒ์ผ'์„ ๋ชฉ์ ์œผ๋กœ ๋งŽ์ด ์‚ฌ์šฉํ•œ๋‹ค. + +### Bind Mount + +๋ฐ”์ธ๋“œ ๋งˆ์šดํŠธ๋Š” ๋„์ปค๊ฐ€ ์„ค์น˜๋œ ์ปดํ“จํ„ฐ์˜ ๋ฌธ์„œ ํด๋” ๋“ฑ์˜ ๋„์ปค ์—”์ง„์—์„œ ๊ด€๋ฆฌํ•˜์ง€ ์•Š๋Š” ์˜์—ญ์˜ ๊ธฐ์กด ๋””๋ ‰ํ„ฐ๋ฆฌ๋ฅผ ์ปจํ…Œ์ด๋„ˆ์— ๋งˆ์šดํŠธ ํ•˜๋Š” ๋ฐฉ์‹์ด๋‹ค. +ํŒŒ์ผ ๋‹จ์œ„ ๋งˆ์šดํŠธ๋„ ๊ฐ€๋Šฅํ•˜๋‹ค. + +ํด๋” ์†์— ํŒŒ์ผ์„ ์ง์ ‘ ๋„ฃ์–ด๋‘๊ฑฐ๋‚˜ ์—ด์–ด ๋ณผ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— **์ž์ฃผ ์‚ฌ์šฉํ•˜๋Š” ํŒŒ์ผ**์„ ๋‘๋Š” ๋ฐ ์‚ฌ์šฉํ•œ๋‹ค. + +| ํ•ญ๋ชฉ | ๋ณผ๋ฅจ ๋งˆ์šดํŠธ | ๋ฐ”์ธํŠธ ๋งˆ์šดํŠธ | +| :------------ | :---------------------- | :--------------------------- | +| ์Šคํ† ๋ฆฌ์ง€ ์˜์—ญ | ๋ณผ๋ฅจ | ๋””๋ ‰ํ„ฐ๋ฆฌ ๋˜๋Š” ํŒŒ์ผ | +| ๋ฌผ๋ฆฌ์  ์œ„์น˜ | ๋„์ปค ์—”์ง„์˜ ๊ด€๋ฆฌ ์˜์—ญ | ์–ด๋””๋“ ์ง€ ๊ฐ€๋Šฅ | +| ๋งˆ์šดํŠธ ์ ˆ์ฐจ | ๋ณผ๋ฅจ์„ ์ƒ์„ฑํ•œ ํ›„ ๋งˆ์šดํŠธ | ๊ธฐ์กด ํŒŒ์ผ ๋˜๋Š” ํด๋”๋ฅผ ๋งˆ์šดํŠธ | +| ๋‚ด์šฉ ํŽธ์ง‘ | ๋„์ปค ์ปจํ…Œ์ด๋„ˆ๋ฅผ ํ†ตํ•ด | ์ผ๋ฐ˜์ ์ธ ํŒŒ์ผ๊ณผ ๊ฐ™์ด | +| ๋ฐฑ์—… | ์ ˆ์ฐจ๊ฐ€ ๋ณต์žกํ•จ | ์ผ๋ฐ˜์ ์ธ ํŒŒ์ผ๊ณผ ๊ฐ™์ด | + +ํŒŒ์ผ์„ ์ง์ ‘ ํŽธ์ง‘ํ•ด์•ผ ํ•  ์ผ์ด ๋งŽ๋‹ค๋ฉด ๋ฐ”์ธ๋“œ ๋งˆ์šดํŠธ๋ฅผ, ๊ทธ๋ ‡์ง€ ์•Š๋‹ค๋ฉด ๋ณผ๋ฅจ ๋งˆ์šดํŠธ๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด ๋œ๋‹ค. + +### ์Šคํ† ๋ฆฌ์ง€ ์˜์—ญ์„ ๋งˆ์šดํŠธํ•˜๋Š” ์ปค๋งจ๋“œ + +run ์ปค๋งจ๋“œ์˜ ์˜ต์…˜ ํ˜•ํƒœ๋กœ ์ง€์ •ํ•œ๋‹ค. + +๋งˆ์šดํŠธ ์ด์ „์— volume์„ ๋จผ์ € ๋งŒ๋“ค์–ด์•ผ ํ•œ๋‹ค. 
+ +```sh +# ์ƒ์„ฑ +docker volume create volume_name +# ์‚ญ์ œ +docker volume rm volume_name +# ์ƒ์„ธ์ •๋ณด +docker volume inspect volume_name +# ๋ชฉ๋ก +docker volume ls +# ๋งˆ์šดํŠธ๋˜์ง€ ์•Š์€ ๋ณผ๋ฅจ ๋ชจ๋‘ ์‚ญ์ œ +docker volume prune +``` + +์ดํ›„ ์Šคํ† ๋ฆฌ์ง€๋ฅผ ๋งˆ์šดํŠธํ•œ๋‹ค + +```sh +# ๋ฐ”์ธ๋“œ ๋งˆ์šดํŠธ +-v ์Šคํ† ๋ฆฌ์ง€_์‹ค์ œ_๊ฒฝ๋กœ:์ปจํ…Œ์ด๋„ˆ_๋งˆ์šดํŠธ_๊ฒฝ๋กœ +# ๋ณผ๋ฅจ ๋งˆ์šดํŠธ +-v ๋ณผ๋ฅจ_์ด๋ฆ„:์ปจํ…Œ์ด๋„ˆ_๋งˆ์šดํŠธ_๊ฒฝ๋กœ +``` +--- + +# SECTION 4. ์ปจํ…Œ์ด๋„ˆ๋กœ ์ด๋ฏธ์ง€ ๋งŒ๋“ค๊ธฐ + +## ์ด๋ฏธ์ง€ ๋งŒ๋“ค๊ธฐ +### commit ์ปค๋งจ๋“œ๋กœ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ด๋ฏธ์ง€๋กœ ๋ณ€ํ™˜ + +```sh +docker commit container_name new_image_name +``` + +### Dockerfile ์Šคํฌ๋ฆฝํŠธ๋กœ ์ด๋ฏธ์ง€ ๋งŒ๋“ค๊ธฐ + +Dockerfile ์Šคํฌ๋ฆฝํŠธ์—๋Š” ํ† ๋Œ€๊ฐ€ ๋  ์ด๋ฏธ์ง€๋‚˜ ์‹คํ–‰ํ•  ๋ช…๋ น์–ด ๋“ฑ์„ ๊ธฐ์ œํ•œ๋‹ค. + +์ด ํŒŒ์ผ์„ ํ˜ธ์ŠคํŠธ ์ปดํ“จํ„ฐ์˜ ์ด๋ฏธ์ง€ ์žฌ๋ฃŒ๊ฐ€ ๋“ค์–ด์žˆ๋Š” ํด๋”์— ๋„ฃ๋Š”๋‹ค. ์žฌ๋ฃŒ ํด๋”์—๋Š” ๊ทธ ์™ธ ์ปจํ…Œ์ด๋„ˆ์— ๋„ฃ์„ ํŒŒ์ผ์„ ํ•จ๊ป˜๋‘”๋‹ค. + +```sh +docker build -t new_image_name ์žฌ๋ฃŒ_ํด๋”_๊ฒฝ๋กœ +``` + +```Dockerfile +FROM ์ด๋ฏธ์ง€_์ด๋ฆ„ +COPY ์›๋ณธ_๊ฒฝ๋กœ ๋Œ€์ƒ_๊ฒฝ๋กœ +RUN ๋ฆฌ๋ˆ…์Šค_๋ช…๋ น์–ด +... +``` + +| ์ธ์ŠคํŠธ๋Ÿญ์…˜ | ๋‚ด์šฉ | +| :------------ | :--- | +| `FROM` |ํ† ๋Œ€๊ฐ€ ๋˜๋Š” ์ด๋ฏธ์ง€๋ฅผ ์ง€์ •| +| `ADD` |์ด๋ฏธ์ง€์— ํŒŒ์ผ์ด๋‚˜ ํด๋”๋ฅผ ์ถ”๊ฐ€| +| `COPY` |์ด๋ฏธ์ง€์— ํŒŒ์ผ์ด๋‚˜ ํด๋”๋ฅผ ๋ณต์‚ฌ| +| `RUN` |์ด๋ฏธ์ง€๋ฅผ ๋นŒ๋“œํ•  ๋•Œ ์‹คํ–‰ํ•  ๋ช…๋ น์–ด ์ง€์ •| +| `CMD` |์ปจํ…Œ์ด๋„ˆ๋ฅผ ์‹คํ–‰ํ•  ๋•Œ ์‹คํ–‰ํ•  ๋ช…๋ น์–ด ์ง€์ •| +| `ENTRYPOINT` |์ปจํ…Œ์ด๋„ˆ๋ฅผ ์‹คํ–‰ํ•  ๋•Œ ์‹คํ–‰ํ•  ๋ช…๋ น์–ด ๊ฐ•์ œ ์ง€์ •| +| `ONBUILD` |์ด ์ด๋ฏธ์ง€๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ๋‹ค๋ฅธ ์ด๋ฏธ์ง€๋ฅผ ๋นŒ๋“œํ•  ๋•Œ ์‹คํ–‰ํ•  ๋ช…๋ น์–ด๋ฅผ ์ง€์ •| +| `EXPOSE` |์ด๋ฏธ์ง€๊ฐ€ ํ†ต์‹ ์— ์‚ฌ์šฉํ•  ํฌํŠธ๋ฅผ ๋ช…์‹œ์ ์œผ๋กœ ์ง€์ •| +| `VOLUME` |persistency data๋ฅผ ์ €์žฅํ•  ๊ฒฝ๋กœ๋ฅผ ๋ช…์‹œ์ ์œผ๋กœ ์ง€์ •| +| `ENV` |ํ™˜๊ฒฝ๋ณ€์ˆ˜ ์ •์˜| +| `WORKDIR` |`RUN`, `CMD`, `ENTRYPOINT`, `ADD`, `COPY`์— ์ •์˜๋œ ๋ช…๋ น์–ด๋ฅผ ์‹คํ–‰ํ•˜๋Š” ์ž‘์—… ๋””๋ ‰ํ„ฐ๋ฆฌ๋ฅผ ์ง€์ •| +| `SHELL` |๋นŒ๋“œ ์‹œ ์‚ฌ์šฉํ•  shell์„ ๋ณ€๊ฒฝ| +| `LABEL` |์ด๋ฆ„์ด๋‚˜ ๋ฒ„์ „, ์ €์ž‘์ž ์ •๋ณด๋ฅผ ์„ค์ •| +| `USER` |`RUN`, `CMD`, `ENTRYPOINT`์— ์ •์˜๋œ ๋ช…๋ น์–ด๋ฅผ ์‹คํ–‰ํ•˜๋Š” ์‚ฌ์šฉ์ž ๋˜๋Š” ๊ทธ๋ฃน์„ ์ง€์ •| +| `ARG` |`docker build` ์ปค๋งจ๋“œ๋ฅผ ์‚ฌ์šฉํ•  ๋•Œ ์ž…๋ ฅ๋ฐ›์„ ์ˆ˜ ์žˆ๋Š” ์ธ์ž๋ฅผ ์„ ์–ธ| +| `STOPSIGNAL` |`docker build` ์ปค๋งจ๋“œ๋ฅผ ์‚ฌ์šฉํ•  ๋•Œ ์ปจํ…Œ์ด๋„ˆ ์•ˆ์—์„œ ์‹คํ–‰ ์ค‘์ธ ํ”„๋กœ๊ทธ๋žจ์— ์ „๋‹ฌ๋˜๋Š” ์‹œ๊ทธ๋„์„ ๋ณ€๊ฒฝ| +| `HEALTHCHECK` |์ปจํ…Œ์ด๋„ˆ health check ๋ฐฉ๋ฒ•์„ ์ปค์Šคํ„ฐ๋งˆ์ด์ง•| + + +## ์ด๋ฏธ์ง€ ์˜ฎ๊ธฐ๊ธฐ + +์ปจํ…Œ์ด๋„ˆ๋Š” ๋จผ์ € ์ด๋ฏธ์ง€๋กœ ๋ณ€ํ™˜ํ•˜์ง€ ์•Š์œผ๋ฉด ์˜ฎ๊ธฐ๊ฑฐ๋‚˜ ๋ณต์‚ฌํ•  ์ˆ˜ ์—†๋‹ค. ํ•˜์ง€๋งŒ ์ด๋ฏธ์ง€ ์—ญ์‹œ ์ด๋ฏธ์ง€ ์ƒํƒœ ๊ทธ๋Œ€๋กœ๋Š” ์˜ฎ๊ธฐ๊ฑฐ๋‚˜ ๋ณต์‚ฌํ•  ์ˆ˜ ์—†์œผ๋ฏ€๋กœ ๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ๋ฅผ ํ†ตํ•˜๊ฑฐ๋‚˜ +`save` ์ปค๋งจ๋“œ๋ฅผ ์‚ฌ์šฉํ•ด tar ํฌ๋งท์œผ๋กœ ๋„์ปค ์—”์ง„์˜ ๊ด€๋ฆฌ ์˜์—ญ ๋ฐ–์œผ๋กœ ๋‚ด๋ณด๋‚ด์•ผ ํ•œ๋‹ค. ํŒŒ์ผ์€ ํ˜ธ์ŠคํŠธ ์ปดํ“จํ„ฐ์˜ ํŒŒ์ผ ์‹œ์Šคํ…œ์— ์ƒ์„ฑ๋œ๋‹ค. +ํŒŒ์ผ์„ ๋‹ค์‹œ ๋„์ปค ์—”์ง„์— ๊ฐ€์ ธ ์˜ค๋ ค๋ฉด `load` ์ปค๋งจ๋“œ๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. +```sh +docker save -o file_name.tar image_name +``` +--- + +# SECTION 5. ์ปจํ…Œ์ด๋„ˆ ๊ฐœ์กฐ + +๋„์ปค๋ฅผ ์‹ค์ œ ์šด์šฉํ•˜๋Š” ํ˜„์žฅ์—์„œ๋Š” ์‚ฌ๋‚ด์—์„œ ๊ฐœ๋ฐœํ•œ ์‹œ์Šคํ…œ์„ ์šด์˜ํ•˜๋Š” ๊ฒฝ์šฐ๊ฐ€ ๋งŽ๋‹ค. +์‚ฌ๋‚ด ๊ฐœ๋ฐœ ์‹œ์Šคํ…œ์ด ์•„๋‹ˆ๋”๋ผ๋„ ๊ณต์‹ ๋ฐฐํฌ๋˜๋Š” SW ์—ญ์‹œ ์ˆ˜์ •ํ•ด์•ผ ํ•  ํ•„์š”๊ฐ€ ์ข…์ข… ์žˆ๋‹ค. + +์ปจํ…Œ์ด๋„ˆ๋ฅผ ๊ฐœ์กฐํ•˜๋Š” ๋ฐฉ๋ฒ•์—๋Š” ๋‘ ๊ฐ€์ง€ ๋ฐฉ๋ฒ•์ด ์žˆ์œผ๋ฉฐ, ๋Œ€๋ถ€๋ถ„ ์ด๋ฅผ ํ˜ผ์šฉํ•˜์—ฌ ์‚ฌ์šฉํ•œ๋‹ค. + +1. 
ํŒŒ์ผ ๋ณต์‚ฌ์™€ ๋งˆ์šดํŠธ๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๋ฐฉ๋ฒ• +2. ์ปจํ…Œ์ด๋„ˆ์—์„œ ๋ฆฌ๋ˆ…์Šค ๋ช…๋ น์–ด๋ฅผ ์‹คํ–‰ํ•˜๋Š” ๋ฐฉ๋ฒ• + +### container์—์„œ ๋ช…๋ น์–ด ์‹คํ–‰ + +container์˜ bash์— ์ ‘๊ทผํ•ด ๋ช…๋ น์„ ์‹คํ–‰ํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” `/bin/bash` ์ธ์ž๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +```sh +docker exec (options) container_name /bin/bash +docker run (options) image_name /bin/bash +``` + +container๊ฐ€ ์ด๋ฏธ ์‹คํ–‰ ์ค‘์ด๋ผ๋ฉด `exec`๋ฅผ ์‚ฌ์šฉํ•ด์„œ ์ ‘๊ทผํ•˜๊ณ , ์ƒˆ๋กœ ์‹œ์ž‘ํ•˜๋Š” ๊ฒฝ์šฐ run์œผ๋กœ ์‹คํ–‰ํ•œ๋‹ค. + +ํ•˜์ง€๋งŒ `run`์— ์ธ์ž๋ฅผ ๋ถ™์—ฌ ์‚ฌ์šฉํ•˜๋Š” ๊ฒฝ์šฐ ์ปจํ…Œ์ด๋„ˆ์— ๋“ค์–ด์žˆ๋Š” SW(์•„ํŒŒ์น˜ ๊ฐ™์€)๋ฅผ ์‹คํ–‰ํ•˜๋Š” ๋Œ€์‹  bash๊ฐ€ ์‹คํ–‰๋œ๋‹ค. +๊ทธ๋ž˜์„œ container๋Š” ์‹คํ–‰ ์ค‘์ธ๋ฐ SW๋Š” ์‹คํ–‰ ์ค‘์ด ์•„๋‹Œ ์ƒํƒœ๊ฐ€ ๋œ๋‹ค. ์ฆ‰, bash ์กฐ์ž‘์ด ๋๋‚˜๊ณ  **๋‹ค์‹œ `docker start`๋กœ ์žฌ์‹œ์ž‘**ํ•ด์•ผ ํ•œ๋‹ค. + +```sh +docker exec -it apa000ex23 /bin/bash + +# ์•„ํŒŒ์น˜ ์‹คํ–‰ ์•ˆ๋จ +docker run --name apa000ex23 -it -p 8089:80 httpd /bin/bash + +# ๋‚˜๊ฐ€๊ธฐ +exit +``` + +์ด๋Ÿฐ ์‹์œผ๋กœ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋‹ค. ์ด๋ ‡๊ฒŒ bash์— ์ ‘๊ทผํ•˜๋ฉด container์— ์ง์ ‘ ๋ช…๋ น์„ ๋‚ด๋ฆด ์ˆ˜ ์žˆ๊ฒŒ ๋œ๋‹ค. + +### ๋„์ปค ์—”์ง„์„ ํ†ตํ•œ ๋ช…๋ น๊ณผ ์ปจํ…Œ์ด๋„ˆ ๋‚ด๋ถ€์—์„œ ์‹คํ–‰ํ•˜๋Š” ๋ช…๋ น + +| ๋„์ปค ์—”์ง„ | ์ปจํ…Œ์ด๋„ˆ ๋‚ด๋ถ€ | +| :-------------------------: | :------------: | +| ๋„์ปค ์—”์ง„์˜ ์‹œ์ž‘ / ์ข…๋ฃŒ | SW ์„ค์น˜ | +| ์ปจํ…Œ์ด๋„ˆ์˜ ์‹œ์ž‘ / ์ข…๋ฃŒ | SW ์‹คํ–‰ / ์ข…๋ฃŒ | +| ์ปจํ…Œ์ด๋„ˆ ์•ˆํŒŽ์˜ ํŒŒ์ผ์„ ๋ณต์‚ฌ | SW ์„ค์ • ๋ณ€๊ฒฝ | +| | ํŒŒ์ผ ์ž‘์—… | +--- + + +# SECTION 6. ๋„์ปค ํ—ˆ๋ธŒ ๋กœ๊ทธ์ธ + +> ์ง์ ‘ ๋งŒ๋“  ์ด๋ฏธ์ง€๋„ ๋„์ปค ํ—ˆ๋ธŒ์— ์˜ฌ๋ฆด ์ˆ˜ ์žˆ์œผ๋ฉฐ, ๋น„๊ณต๊ฐœ๋กœ ์‚ฌ์šฉํ•˜๋Š” ๋„์ปค ํ—ˆ๋ธŒ ๊ฐ™์€ ์žฅ์†Œ๋„ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค. + +## ๋„์ปค ํ—ˆ๋ธŒ์™€ ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ + +์ด๋ฏธ์ง€๋ฅผ ๋ฐฐํฌํ•˜๋Š” ์žฅ์†Œ๋ฅผ **๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ**๋ผ๊ณ  ํ•œ๋‹ค. +์ผ๋ฐ˜์— ๊ณต๊ฐœ๋˜์–ด ์žˆ๋“  ๋ง๋“  ์ƒ๊ด€์—†์ด ์ด๋ฏธ์ง€๊ฐ€ ๋ฐฐํฌ๋˜๋Š” ๊ณณ์€ ๋ชจ๋‘ ๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ์ด๋‹ค. + +๋„์ปค ํ—ˆ๋ธŒ๋Š” ๋„์ปค ์ œ์ž‘์‚ฌ์—์„œ ์šด์˜ํ•˜๋Š” ๊ณต์‹ ๋„์ปค ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ๋ฅผ ๋งํ•œ๋‹ค. +์šฐ๋ฆฌ๊ฐ€ `run` ์ปค๋งจ๋“œ๋ฅผ ์‚ฌ์šฉํ•  ๋•Œ ๋‚ด๋ ค๋ฐ›๋Š” ์ด๋ฏธ์ง€๋Š” ์ด๋ ‡๊ฒŒ ์ œ๊ณต๋œ๋‹ค. + +### ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ์™€ ๋ฆฌํฌ์ง€ํ† ๋ฆฌ + +๋ ˆ์ง€์ŠคํŠธ๋ฆฌ๋Š” ์ด๋ฏธ์ง€๋ฅผ ๋ฐฐํฌํ•˜๋Š” ์žฅ์†Œ์ด๊ณ , ๋ฆฌํฌ์ง€ํ† ๋ฆฌ๋Š” ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ๋ฅผ ๊ตฌ์„ฑํ•˜๋Š” ๋‹จ์œ„์ด๋‹ค. + +์ฆ‰ ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ๋Š” ํšŒ์‚ฌ๋‚˜ ๋ถ€์„œ ๋‹จ์œ„๋กœ ๋งŒ๋“ค์ง€๋งŒ, ๋ฆฌํฌ์ง€ํ† ๋ฆฌ๋Š” SW๋ฅผ ๋‹จ์œ„๋กœ ํ•œ๋‹ค. + +--- + +## ํƒœ๊ทธ์™€ ์ด๋ฏธ์ง€ ์—…๋กœ๋“œ + +ํƒœ๊ทธ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์€ ํ˜•ํƒœ๋ฅผ ๋ˆ๋‹ค + +```sh +resistry_addr(docker_hub_id)/reporitory_name:ver +``` + +ํƒœ๊ทธ ๋ถ€์—ฌ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์ด ํ•œ๋‹ค. + +```sh +docker tag ์›๋ž˜_์ด๋ฏธ์ง€_์ด๋ฆ„ ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ_์ฃผ์†Œ/๋ฆฌํฌ์ง€ํ† ๋ฆฌ_์ด๋ฆ„:๋ฒ„์ „ +``` + +์ด๋ฏธ์ง€๋ฅผ ์—…๋กœ๋“œ ํ•  ๋•Œ๋Š” `push`๋ฅผ ์‚ฌ์šฉํ•œ๋‹ค. + +```sh +docker push ๋ ˆ์ง€์ŠคํŠธ๋ฆฌ_์ฃผ์†Œ/๋ฆฌํฌ์ง€ํ† ๋ฆฌ_์ด๋ฆ„:๋ฒ„์ „ +``` \ No newline at end of file diff --git a/_posts/2024-12-12-dockerCH7.markdown b/_posts/2024-12-12-dockerCH7.markdown new file mode 100644 index 00000000000..c8a5c105b32 --- /dev/null +++ b/_posts/2024-12-12-dockerCH7.markdown @@ -0,0 +1,300 @@ +--- +title: "CH7. ๋„์ปค ์ปดํฌ์ฆˆ๋ฅผ ์ตํžˆ์ž [๊ทธ๋ฆผ์œผ๋กœ ๋ฐฐ์šฐ๋Š” ๋„์ปค & ์ฟ ๋ฒ„๋„คํ‹ฐ์Šค]" +author: kwon +date: 2024-12-12T14:00:00 +0900 +categories: [๋„์ปค] +tags: [docker] +math: true +mermaid: false +--- + +# SECTION 1. ๋„์ปค ์ปดํฌ์ฆˆ๋ž€? 
+ +## ๋„์ปค ์ปดํฌ์ฆˆ + +์‹œ์Šคํ…œ ๊ตฌ์ถ•๊ณผ ๊ด€๋ จ๋œ ๋ช…๋ น์–ด๋ฅผ ํ•˜๋‚˜์˜ ํ…์ŠคํŠธ ํŒŒ์ผ(์ •์˜ ํŒŒ์ผ)์— ๊ธฐ์žฌํ•ด +๋ช…๋ น์–ด ํ•œ๋ฒˆ์— ์‹œ์Šคํ…œ ์ „์ฒด๋ฅผ ์‹คํ–‰ํ•˜๊ณ  ์ข…๋ฃŒ์™€ ํ๊ธฐ๊นŒ์ง€ ํ•œ๋ฒˆ์— ํ•˜๋„๋ก ๋„์™€์ฃผ๋Š” ๋„๊ตฌ์ด๋‹ค. + +![](/posting_imgs/docker-compose1.png) + +### ๊ตฌ์กฐ + +docker compose๋Š” ์‹œ์Šคํ…œ ๊ตฌ์ถ•์— ํ•„์š”ํ•œ ์„ค์ •์„ YAML(YAML Ain't Markup Language) ํฌ๋ฉง์œผ๋กœ ๊ธฐ์žฌํ•œ ์ •์˜ ํŒŒ์ผ์„ ์ด์šฉํ•ด ์ „์ฒด ์‹œ์Šคํ…œ์„ ์ผ๊ด„ ์‹คํ–‰(`run`) ๋˜๋Š” ์ผ๊ด„ ์ข…๋ฃŒ ๋ฐ ์‚ญ์ œ(`down`)ํ•  ์ˆ˜ ์žˆ๋Š” ๋„๊ตฌ๋‹ค. + +์ •์˜ ํŒŒ์ผ์—๋Š” ์ปจํ…Œ์ด๋„ˆ๋‚˜ ๋ณผ๋ฅจ์„ ์–ด๋–ค ์„ค์ •์œผ๋กœ ๋งŒ๋“ค์ง€์— ๋Œ€ํ•œ ํ•ญ๋ชฉ์ด ๊ธฐ์žฌ๋˜์–ด ์žˆ๋‹ค. +์ž‘์„ฑ ๋‚ด์šฉ์€ ๋„์ปค ๋ช…๋ น์–ด๋กธ ๋น„์Šทํ•˜์ง€๋งŒ ๋„์ปค ๋ช…๋ น์–ด๊ฐ€ ์•„๋‹ˆ๋‹ค. + +#### `up` +`docker run`๊ณผ ๋น„์Šทํ•˜๋‹ค, ์ •์˜ ํŒŒ์ผ์— ๊ธฐ์žฌ๋œ ๋‚ด์šฉ๋Œ€๋กœ ์ด๋ฏธ์ง€๋ฅผ ๋‚ด๋ ค๋ฐ›๊ณ  ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ƒ์„ฑ ๋ฐ ์‹คํ–‰ํ•œ๋‹ค. +์ •์˜ ํŒŒ์ผ์—๋Š” ๋„คํŠธ์›Œํฌ๋‚˜ ๋ณผ๋ฅจ์— ๋Œ€ํ•œ ์ •์˜๋„ ๊ธฐ์žฌํ•  ์ˆ˜ ์žˆ์–ด์„œ ์ฃผ๋ณ€ ํ™˜๊ฒฝ์„ ํ•œ๊บผ๋ฒˆ์— ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ๋‹ค. + +#### `down` +์ปจํ…Œ์ด๋„ˆ์™€ ๋„คํŠธ์›Œํฌ๋ฅผ ์ •์ง€ ๋ฐ ์‚ญ์ œํ•œ๋‹ค. +**๋ณผ๋ฅจ๊ณผ ์ด๋ฏธ์ง€๋Š” ์‚ญ์ œํ•˜์ง€ ์•Š๋Š”๋‹ค**. ์ปจํ…Œ์ด๋„ˆ์™€ ๋„คํŠธ์›Œํฌ ์‚ญ์ œ ์—†์ด ์ข…๋ฃŒ๋งŒ ํ•˜๊ณ  ์‹ถ๋‹ค๋ฉด stop ์ปค๋งจ๋“œ๋ฅผ ์ด์šฉํ•œ๋‹ค. + +docker compose๊ฐ€ Dockerfile ์Šคํฌ๋ฆฝํŠธ์™€ ๋น„์Šทํ•˜๊ฒŒ ๋А๊ปด์งˆ ์ˆ˜ ์žˆ๋‹ค. +ํ•˜์ง€๋งŒ Dockerfile์€ ์ด๋ฏธ์ง€๋ฅผ ๋งŒ๋“ค๊ธฐ ์œ„ํ•œ ๊ฒƒ์œผ๋กœ **๋„คํŠธ์›Œํฌ๋‚˜ ๋ณผ๋ฅจ์€ ๋งŒ๋“ค ์ˆ˜ ์—†๋‹ค.** +๋ฐ˜๋ฉด, docker compose๋Š” docker run ๋ช…๋ น์–ด๋ฅผ ์—ฌ๋Ÿฌ ๊ฐœ ๋ชจ์•„๋†“์€ ๊ฒƒ๊ณผ ๊ฐ™์€ ๊ฒƒ์œผ๋กœ, +**๋„คํŠธ์›Œํฌ์™€ ๋ณผ๋ฅจ๊นŒ์ง€ ํ•จ๊ป˜ ๋งŒ๋“ค ์ˆ˜ ์žˆ๋‹ค.** + +--- + +# SECTION 2. ๋„์ปค ์ปดํฌ์ฆˆ์˜ ์„ค์น˜์™€ ์‚ฌ์šฉ๋ฒ• + +## docker compose์˜ ์„ค์น˜ +๋„์ปค ์ปดํฌ์ฆˆ๋Š” ํ† ์ปค ์—”์ง„๊ณผ ๋ณ„๊ฐœ์˜ SW์ด๋ฏ€๋กœ ์„ค์น˜๋ฅผ ๋”ฐ๋กœ ํ•ด์ค˜์•ผ ํ•œ๋‹ค. +์œˆ๋„์šฐ๋‚˜ mac OS์—์„œ ์‚ฌ์šฉํ•˜๋Š” docker desktop์€ docker compose๊ฐ€ ํ•จ๊ป˜ ์„ค์น˜๋˜๋ฏ€๋กœ ์‹ ๊ฒฝ ์“ธ ํ•„์š” ์—†๋‹ค. +๋ฆฌ๋ˆ…์Šค์˜ ๊ฒฝ์šฐ ์•„๋ž˜์˜ ๋ช…๋ น์–ด์— ๋”ฐ๋ผ ์„ค์น˜๋ฅผ ์ง„ํ–‰ํ•œ๋‹ค. + +```sh +sudo apt install -y python3 python3-pip +sudo pip3 install docker-compose +``` + +## docker compose์˜ ์‚ฌ์šฉ๋ฒ• + +docker compose๋ฅผ ์‚ฌ์šฉํ•˜๋ ค๋ฉด Dockerfile ์Šคํฌ๋ฆฝํŠธ๋กœ ์ด๋ฏธ์ง€๋ฅผ ๋นŒ๋“œํ•  ๋•Œ์ฒ˜๋Ÿผ ํ˜ธ์Šคํฌ ์ปดํ“จํ„ฐ์— ํด๋”๋ฅผ ๋งŒ๋“ค๊ณ  ์ด ํด๋”์— ์ •์˜ ํŒŒ์ผ(YAML)์„ ๋ฐฐ์น˜ํ•œ๋‹ค. + +์ •์˜ ํŒŒ์ผ์€ docker-compose.yml์ด๋ผ๋Š” ์ด๋ฆ„์œผ๋กœ ๋งŒ๋“ค์–ด์•ผ ํ•œ๋‹ค. +ํŒŒ์ผ์€ ํ˜ธ์ŠคํŠธ ์ปดํ“จํ„ฐ์— ๋ฐฐ์น˜๋˜์ง€๋งŒ ๋ช…๋ น์–ด๋Š” ๋˜‘๊ฐ™์ด ๋„์ปค ์—”์ง„์— ์ „๋‹ฌ๋˜๋ฉฐ, ๋งŒ๋“ค์–ด์ง„ ์ปจํ…Œ์ด๋„ˆ๋„ ๋™์ผํ•˜๊ฒŒ ๋„์ปค ์—”์ง„ ์œ„์—์„œ ๋™์ž‘ํ•œ๋‹ค. +์ฆ‰, ์‚ฌ๋žŒ์ด ์ž…๋ ฅํ•ด๋˜ ๋ช…๋ น์–ด๋“ค์„ docker compose๊ฐ€ ๋Œ€์‹  ์ž…๋ ฅํ•ด์ฃผ๋Š” ๊ตฌ์กฐ๋‹ค. + +์ •์˜ ํŒŒ์ผ์€ ํ•œ ํด๋”์— ํ•˜๋‚˜๋งŒ ์žˆ์„ ์ˆ˜ ์žˆ๋‹ค. +๊ทธ๋ž˜์„œ ์—ฌ๋Ÿฌ ๊ฐœ์˜ ์ •์˜ ํŒŒ์ผ์„ ์‚ฌ์šฉํ•˜๋ ค๋ฉด ๊ทธ ๊ฐœ์ˆ˜๋งŒํผ ํด๋”๋ฅผ ๋งŒ๋“ค์–ด์•ผ ํ•œ๋‹ค. +์ปจํ…Œ์ด๋„ˆ ์ƒ์„ฑ์— ํ•„์š”ํ•œ ์ด๋ฏธ์ง€ ํŒŒ์ผ๊ณผ ๊ฐ™์€ ๋ถ€๊ฐ€์ ์ธ ํŒŒ์ผ๋„ compose๊ฐ€ ์‚ฌ์šฉํ•  ํด๋”์— ํ•จ๊ป˜ ๋‘”๋‹ค. + +--- + +# SECTION 3. ๋„์ปค ์ปดํฌ์ฆˆ ํŒŒ์ผ์„ ์ž‘์„ฑํ•˜๋Š” ๋ฒ• + +## docker compose์˜ ๋‚ด์šฉ + +```yml +version: "3" + +sevices: + apa000ex2: + image: httpd + ports: + - 8080:80 + restart: always +``` + +์ด docker compose ํŒŒ์ผ์€ ์•„๋ž˜ ๋ช…๋ น์–ด์™€ ๊ฐ™์€ ๋‚ด์šฉ์„ ๋‹ด๊ณ  ์žˆ๋‹ค. + +```sh +docker run --name apa000ex2 -d -p 8080:80 httpd +``` + +์ „์ฒด์ ์ธ ๊ตฌ์กฐ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™๋‹ค. 
+
+The overall structure looks like this.
+
+```yml
+version: "3"
+
+services:
+  container_name1:
+    image: image_name
+    networks:
+      - network_name
+    ports:
+      - port_setting
+  container_name2:
+
+networks:
+  network_name1:
+volumes:
+  volume_name1:
+  volume_name2:
+```
+
+### Summary of how to write a Compose file
+
+- State the Docker Compose version on the first line
+- Write the settings under the main items services, networks, and volumes
+- Parent/child relationships between items are expressed by indenting with spaces
+- Indentation must use multiples of the same number of spaces
+- Names are written one indentation level below their main item
+- To list several entries, prefix each line with '-'
+- A name is followed by ':'
+- A colon must be followed by a space
+- Anything after # is treated as a comment
+- Strings are wrapped in single or double quotes
+
+### Items in a Compose file
+
+#### Main items
+
+| Item     | Description        |
+| :------- | :----------------- |
+| services | Defines containers |
+| networks | Defines networks   |
+| volumes  | Defines volumes    |
+
+#### Frequently used definition items
+
+| Item        | `docker run` option / argument | Description                                     |
+| :---------- | :----------------------------- | :---------------------------------------------- |
+| image       | image argument                 | Image to use                                    |
+| networks    | `--net`                        | Network(s) to connect to                        |
+| volumes     | `-v`, `--mount`                | Storage mount settings                          |
+| environment | `-e`                           | Environment variables                           |
+| depends_on  | -                              | Declares a dependency on another service        |
+| restart     | -                              | Whether to restart the container when it exits  |
+
+#### restart values
+
+| Value          | Description                                                              |
+| :------------- | :----------------------------------------------------------------------- |
+| no             | Never restart                                                             |
+| always         | Always restart                                                            |
+| on-failure     | Restart if the process exits with a non-zero status                       |
+| unless-stopped | Do not restart if the container was stopped manually; otherwise restart   |
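+
+Indentation and colon-spacing mistakes are easy to make, so it can help to check the file before starting anything. A small sketch, assuming it is run in the folder that holds `docker-compose.yml`:
+
+```sh
+# validate the definition file and print the fully resolved version of it
+docker-compose config
+
+# with the Compose V2 plugin the equivalent is:
+# docker compose config
+```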
+
+## Hands-on
+
+### docker-compose.yml
+```yml
+version: "3.8"
+services:
+  postgres:
+    build:
+      context: .
+      dockerfile: Dockerfile.postgres # Dockerfile for postgres
+    container_name: backend
+    environment:
+      POSTGRES_DB: rag
+      POSTGRES_USER: user
+      POSTGRES_PASSWORD: password
+    working_dir: /home
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+      - .:/home/app
+    ports:
+      - "5430:5432"
+    stdin_open: true
+    tty: true
+    command: tail -f /dev/null
+    # restart: unless-stopped
+
+  vue:
+    build:
+      context: .
+      dockerfile: Dockerfile.vue # Dockerfile for vue
+    container_name: frontend
+    network_mode: host
+    working_dir: /app
+    volumes:
+      - .:/app # mount the local directory into the container
+    ports:
+      - "5173:5173"
+    stdin_open: true
+    tty: true
+
+volumes:
+  postgres_data:
+```
+
+### Dockerfile.postgres
+```Dockerfile
+# base image
+FROM kwon0528/postgres_backend:0.5
+
+# install the required packages (bash)
+RUN apt-get update && apt-get install -y bash
+
+# set the working directory
+WORKDIR /home
+
+# copy back.sh into the image
+COPY back.sh /home/back.sh
+
+# make it executable
+RUN chmod +x /home/back.sh
+
+# run it
+CMD ["bash", "/home/back.sh"]
+```
+
+### Dockerfile.vue
+```Dockerfile
+# base image
+FROM kwon0528/vue_frontend:0.3.2
+
+# set the working directory
+WORKDIR /app
+
+# # copy front.sh into the image
+# COPY front.sh /app/chuibot/front.sh
+
+# # make it executable
+# RUN chmod +x /app/chuibot/front.sh
+
+# # run it
+# CMD ["sh", "/app/front.sh"]
+# CMD cd ~/chuibot && git pull
+```
+
+This is the Docker Compose setup I put together to configure the environment for an actual project that uses PostgreSQL and Vue.
+The difference from what we covered above is that instead of naming an image directly, each service is built from a Dockerfile through the build key.
+
+As this shows, Dockerfiles can also be used from within a Docker Compose configuration.
+
+---
+
+# Running Docker Compose
+
+## Docker Compose commands
+
+### `docker-compose up`
+
+```sh
+docker-compose -f compose_file_path up
+```
+
+Creates and starts the containers, volumes, and networks according to the contents of the Compose file.
+
+#### Options
+
+| Option                      | Description                                                        |
+| --------------------------- | ------------------------------------------------------------------ |
+| `-d`                        | Run in the background                                               |
+| `--no-color`                | Print output in monochrome                                          |
+| `--no-deps`                 | Do not start linked services                                        |
+| `--force-recreate`          | Recreate containers even if settings or images have not changed     |
+| `--no-recreate`             | Do not recreate containers that already exist                       |
+| `--no-build`                | Do not build images, even if they are missing                       |
+| `--build`                   | Build images before starting the containers                         |
+| `--abort-on-container-exit` | Stop all containers if any single container exits                   |
+| `-t`, `--timeout`           | Timeout used when shutting containers down (default 10 seconds)     |
+| `--remove-orphans`          | Remove containers of services not defined in the Compose file       |
+| `--scale`                   | Change the number of containers for a service                       |
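+
+For reference, a minimal sketch of how the hands-on project above might be started and inspected; it assumes the commands are run in the folder containing that `docker-compose.yml`, and the service name `postgres` comes from that file:
+
+```sh
+docker-compose up -d --build     # build the Dockerfile-based services first, then start everything in the background
+docker-compose ps                # list the containers managed by this Compose file
+docker-compose logs -f postgres  # follow the logs of a single service
+```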
+
+### `docker-compose down`
+
+```sh
+docker-compose -f compose_file_path down
+```
+
+Stops and removes the containers and networks according to the contents of the Compose file. Volumes and images are not removed.
+
+#### Options
+
+| Option             | Description                                                                                                                            |
+| ------------------ | -------------------------------------------------------------------------------------------------------------------------------------- |
+| `--rmi` type       | Also remove images. With type `all`, every image that was used is removed; with `local`, only images without a custom tag are removed   |
+| `-v`, `--volumes`  | Remove the volumes listed in the volumes section. Volumes declared as external are not removed                                          |
+| `--remove-orphans` | Also remove containers of services not defined in the Compose file                                                                      |
+
+### `docker-compose stop`
+
+```sh
+docker-compose -f compose_file_path stop
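+
+# (sketch) unlike `down`, `stop` removes nothing, so the same containers
+# can be brought back later with:
+docker-compose -f compose_file_path start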