Makefile
.PHONY: clean data lint requirements sync_data_to_s3 sync_data_from_s3

#################################################################################
# GLOBALS                                                                       #
#################################################################################

PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
BUCKET = [OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')
PROFILE = default
PROJECT_NAME = analisis_open_data_pjud
PYTHON_INTERPRETER = python3

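# Flag whether a conda executable is on the PATH; create_environment below uses this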
ifeq (,$(shell which conda))
HAS_CONDA=False
else
HAS_CONDA=True
endif

#################################################################################
# COMMANDS                                                                      #
#################################################################################

## Install Python Dependencies
requirements: test_environment
	$(PYTHON_INTERPRETER) -m pip install -U pip setuptools wheel
	$(PYTHON_INTERPRETER) -m pip install -r requirements.txt

## Make Dataset
data: requirements
	$(PYTHON_INTERPRETER) src/data/make_dataset.py data/raw data/processed

## Delete all compiled Python files
clean:
	find . -type f -name "*.py[co]" -delete
	find . -type d -name "__pycache__" -delete

## Lint using flake8
lint:
	flake8 src

## Upload Data to S3
sync_data_to_s3:
ifeq (default,$(PROFILE))
	aws s3 sync data/ s3://$(BUCKET)/data/
else
	aws s3 sync data/ s3://$(BUCKET)/data/ --profile $(PROFILE)
endif

## Download Data from S3
sync_data_from_s3:
ifeq (default,$(PROFILE))
	aws s3 sync s3://$(BUCKET)/data/ data/
else
	aws s3 sync s3://$(BUCKET)/data/ data/ --profile $(PROFILE)
endif

## Set up python interpreter environment
create_environment:
ifeq (True,$(HAS_CONDA))
	@echo ">>> Detected conda, creating conda environment."
ifeq (3,$(findstring 3,$(PYTHON_INTERPRETER)))
	conda create --name $(PROJECT_NAME) python=3
else
	conda create --name $(PROJECT_NAME) python=2.7
endif
	@echo ">>> New conda env created. Activate with:\nsource activate $(PROJECT_NAME)"
else
	$(PYTHON_INTERPRETER) -m pip install -q virtualenv virtualenvwrapper
	@echo ">>> Installing virtualenvwrapper if not already installed.\nMake sure the following lines are in shell startup file\n\
	export WORKON_HOME=$$HOME/.virtualenvs\nexport PROJECT_HOME=$$HOME/Devel\nsource /usr/local/bin/virtualenvwrapper.sh\n"
	@bash -c "source `which virtualenvwrapper.sh`;mkvirtualenv $(PROJECT_NAME) --python=$(PYTHON_INTERPRETER)"
	@echo ">>> New virtualenv created. Activate with:\nworkon $(PROJECT_NAME)"
endif

## Test python environment is setup correctly
test_environment:
	$(PYTHON_INTERPRETER) test_environment.py

#################################################################################
# PROJECT RULES                                                                 #
#################################################################################

#################################################################################
# Self Documenting Commands                                                     #
#################################################################################

.DEFAULT_GOAL := help

# Inspired by <http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html>
# sed script explained:
# /^##/:
# 	* save line in hold space
# 	* purge line
# 	* Loop:
# 		* append newline + line to hold space
# 		* go to next line
# 		* if line starts with doc comment, strip comment character off and loop
# 	* remove target prerequisites
# 	* append hold space (+ newline) to line
# 	* replace newline plus comments by `---`
# 	* print line
# Separate expressions are necessary because labels cannot be delimited by
# semicolon; see <http://stackoverflow.com/a/11799865/1968>
.PHONY: help
help:
	@echo "$$(tput bold)Available rules:$$(tput sgr0)"
	@echo
	@sed -n -e "/^## / { \
		h; \
		s/.*//; \
		:doc" \
		-e "H; \
		n; \
		s/^## //; \
		t doc" \
		-e "s/:.*//; \
		G; \
		s/\\n## /---/; \
		s/\\n/ /g; \
		p; \
	}" ${MAKEFILE_LIST} \
	| LC_ALL='C' sort --ignore-case \
	| awk -F '---' \
		-v ncol=$$(tput cols) \
		-v indent=19 \
		-v col_on="$$(tput setaf 6)" \
		-v col_off="$$(tput sgr0)" \
	'{ \
		printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
		n = split($$2, words, " "); \
		line_length = ncol - indent; \
		for (i = 1; i <= n; i++) { \
			line_length -= length(words[i]) + 1; \
			if (line_length <= 0) { \
				line_length = ncol - indent - length(words[i]) - 1; \
				printf "\n%*s ", -indent, " "; \
			} \
			printf "%s ", words[i]; \
		} \
		printf "\n"; \
	}' \
	| more $(shell test $(shell uname) = Darwin && echo '--no-init --raw-control-chars')
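
Typical invocations, as a rough sketch: the targets come from the Makefile above, but make, the AWS CLI, flake8, and a Python 3 interpreter are assumed to be on the PATH, and the bucket and profile values below are placeholders rather than anything defined in this repository.

make                        # no target runs the default goal, i.e. `make help`
make create_environment     # conda env or virtualenvwrapper env, depending on whether conda is found
make requirements           # runs test_environment.py, then upgrades pip and installs requirements.txt
make data                   # runs src/data/make_dataset.py data/raw data/processed
make lint                   # flake8 over src/
make clean                  # deletes *.pyc / *.pyo files and __pycache__ directories
make sync_data_to_s3 BUCKET=my-bucket PROFILE=my-profile    # placeholder bucket and AWS profile
make sync_data_from_s3 BUCKET=my-bucket                     # PROFILE=default, so --profile is omitted

Command-line assignments such as BUCKET=my-bucket override the variables defined at the top of the Makefile, so the file itself does not need to be edited for a one-off sync.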