Commit 3e4a8990 authored by dj44vuri

valugaps taken offline and new designs for feedadditives

parent 075ea82f
@@ -5,4 +5,5 @@
dce_simulation_tool.Rproj
modeloutput/
*.html
simulation_output_files/
Projects/ValuGaps/
\ No newline at end of file
Design Choice situation alt1.b alt1.c alt1.p alt2.b alt2.c alt2.p Block
1 1 2 3 40 3 2 120 5
1 2 3 2 80 2 3 120 5
1 3 0 1 120 1 0 120 5
1 4 1 0 160 0 1 120 5
1 5 2 0 120 3 1 80 2
1 6 3 1 160 2 0 80 2
1 7 0 2 40 1 3 80 2
1 8 1 3 80 0 2 80 2
1 9 1 2 120 0 3 40 6
1 10 0 3 160 1 2 40 6
1 11 3 0 40 2 1 40 6
1 12 2 1 80 3 0 40 6
1 13 3 3 160 3 3 160 1
1 14 2 2 120 2 2 160 1
1 15 1 1 80 1 1 160 1
1 16 0 0 40 0 0 160 1
1 17 3 3 80 1 1 80 3
1 18 2 2 40 0 0 80 3
1 19 1 1 160 3 3 80 3
1 20 0 0 120 2 2 80 3
1 21 2 2 80 1 1 160 5
1 22 3 3 40 0 0 160 5
1 23 0 0 160 3 3 160 5
1 24 1 1 120 2 2 160 5
1 25 0 2 80 3 0 40 1
1 26 1 3 40 2 1 40 1
1 27 2 0 160 1 2 40 1
1 28 3 1 120 0 3 40 1
1 29 0 1 40 2 1 160 6
1 30 1 0 80 3 0 160 6
1 31 2 3 120 0 3 160 6
1 32 3 2 160 1 2 160 6
1 33 0 2 160 0 1 120 4
1 34 1 3 120 1 0 120 4
1 35 2 0 80 2 3 120 4
1 36 3 1 40 3 2 120 4
1 37 0 3 80 2 3 80 4
1 38 1 2 40 3 2 80 4
1 39 2 1 160 0 1 80 4
1 40 3 0 120 1 0 80 4
1 41 2 3 160 2 0 40 2
1 42 3 2 120 3 1 40 2
1 43 0 1 80 0 2 40 2
1 44 1 0 40 1 3 40 2
1 45 1 2 160 2 0 120 3
1 46 0 3 120 3 1 120 3
1 47 3 0 80 0 2 120 3
1 48 2 1 40 1 3 120 3
1 49 2 1 40 1 3 40 3
1 50 3 0 80 0 2 40 3
1 51 0 3 120 3 1 40 3
1 52 1 2 160 2 0 40 3
1 53 1 0 40 1 3 120 2
1 54 0 1 80 0 2 120 2
1 55 3 2 120 3 1 120 2
1 56 2 3 160 2 0 120 2
1 57 3 0 120 1 0 160 4
1 58 2 1 160 0 1 160 4
1 59 1 2 40 3 2 160 4
1 60 0 3 80 2 3 160 4
1 61 3 1 40 3 2 40 4
1 62 2 0 80 2 3 40 4
1 63 1 3 120 1 0 40 4
1 64 0 2 160 0 1 40 4
1 65 3 2 160 1 2 80 6
1 66 2 3 120 0 3 80 6
1 67 1 0 80 3 0 80 6
1 68 0 1 40 2 1 80 6
1 69 3 1 120 0 3 120 1
1 70 2 0 160 1 2 120 1
1 71 1 3 40 2 1 120 1
1 72 0 2 80 3 0 120 1
1 73 1 1 120 2 2 80 5
1 74 0 0 160 3 3 80 5
1 75 3 3 40 0 0 80 5
1 76 2 2 80 1 1 80 5
1 77 0 0 120 2 2 160 3
1 78 1 1 160 3 3 160 3
1 79 2 2 40 0 0 160 3
1 80 3 3 80 1 1 160 3
1 81 0 0 40 0 0 80 1
1 82 1 1 80 1 1 80 1
1 83 2 2 120 2 2 80 1
1 84 3 3 160 3 3 80 1
1 85 2 1 80 3 0 120 6
1 86 3 0 40 2 1 120 6
1 87 0 3 160 1 2 120 6
1 88 1 2 120 0 3 120 6
1 89 1 3 80 0 2 160 2
1 90 0 2 40 1 3 160 2
1 91 3 1 160 2 0 160 2
1 92 2 0 120 3 1 160 2
1 93 1 0 160 0 1 40 5
1 94 0 1 120 1 0 40 5
1 95 3 2 80 2 3 40 5
1 96 2 3 40 3 2 40 5
||||||||||
design
;alts = alt1*, alt2*, alt3
;rows = 72
;block = 6
;orth = sim
;con
;model:
U(alt1) = b1 + b2 * B[0,1,2,3] + b3 * C[0,1,2,3] + b5 * P[40,80,120,160]/
U(alt2) = b1 + b2 * B + b3 * C + b5 * P
;formatTitle = 'Scenario <scenarionumber> Block <blocknumber>'
;formatTableDimensions = 3, 5
;formatTable:
1,1 = '' /
1,2 = 'Share of protected areas' /
1,3 = 'Share of HNV' /
1,4 = 'Cost per year' /
1,5 = 'Choice question&:' /
2,1 = 'alt1' /
2,2 = '<alt1.b>' /
2,3 = '<alt1.c>' /
2,4 = '<alt1.p>' /
2,5 = '' /
3,1 = 'alt2' /
3,2 = '<alt2.b>' /
3,3 = '<alt2.c>' /
3,4 = '<alt2.p>' /
3,5 = ''
;formatTableStyle:
1,1 = 'default' /
1,2 = 'headingattribute' /
1,3 = 'headingattribute' /
1,4 = 'headingattribute' /
1,5 = 'headingattribute' /
2,1 = 'heading1' /
2,2 = 'body1' /
2,3 = 'body1' /
2,4 = 'body1' /
2,5 = 'choice1' /
3,1 = 'heading2' /
3,2 = 'body2' /
3,3 = 'body2' /
3,4 = 'body2' /
3,5 = 'choice2'
;formatStyleSheet = Default.css
;formatAttributes:
alt1.b(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt1.c(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt1.p(40=#, 80=#, 120=#, 160=#) /
alt2.b(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt2.c(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt2.p(40=#, 80=#, 120=#, 160=#)
$
\ No newline at end of file
# Only needs to be run once to obtain the BfN data
# Can be adjusted to load data for other layers (national parks etc.)
library(xml2)
library(terra)
library(sf)
library(tidyverse)
library(gdalUtilities)
url <- "https://geodienste.bfn.de/ogc/wfs/schutzgebiet?REQUEST=GetCapabilities&SERVICE=WFS&VERSION=2.0.0"
wms_capabilities <- read_xml(url)
layers <- st_layers(url)
data_nasc <- read_sf(url, layer= "bfn_sch_Schutzgebiet:Naturschutzgebiete")
data_nasc <- data_nasc %>% mutate(value=1)
ggplot(data=data_nasc) +
#geom_sf(data=germany_geo) +
geom_sf(fill="darkgreen", col="darkgreen")
#ggsave("Figures/bfn_map.png", width = 5, height = 4)
# Rasterize data #
# Issue with the geometry conversion: use the helper function below to convert the data to multipolygons before vectorizing
ensure_multipolygons <- function(X) {
tmp1 <- tempfile(fileext = ".gpkg")
tmp2 <- tempfile(fileext = ".gpkg")
st_write(X, tmp1)
ogr2ogr(tmp1, tmp2, f = "GPKG", nlt = "MULTIPOLYGON")
Y <- st_read(tmp2)
st_sf(st_drop_geometry(X), geom = st_geometry(Y))
}
data_nasc_con <- ensure_multipolygons(data_nasc)
data_nasc_con <- st_transform(data_nasc_con, crs="epsg:3035")
vec_nasc <- vect(data_nasc_con)
empty_raster <- rast(vec_nasc) # template raster with the extent of the protected areas; resolution is set below
res(empty_raster) <- c(100, 100)
raster_nasc <- rasterize(vec_nasc, empty_raster, "value")
plot(raster_nasc, col="darkgreen")
saveRDS(raster_nasc, "data/gis/protected_bfn_raster.rds")
# Check if conversion and storage worked #
raster_bfn <- readRDS("data/gis/protected_bfn_raster.rds") # works
# Functions to compute shares
sum_cover <- function(x){
list(x %>%
group_by(value) %>%
summarize(total_area = sum(coverage_area, na.rm=T)) %>%
mutate(proportion = total_area/sum(total_area[1:2])))
}
process_data <- function(data, name_prefix, cover_data) {
result <- exact_extract(data, buffer_pts, fun=sum_cover, coverage_area=T, summarize_df=T)
col_name <- paste(name_prefix)
result <- as.data.frame(sapply(result, function(x) ifelse(length(x$proportion) >= 2, x$proportion[2], 0)))
colnames(result) <- col_name
result <- result %>% mutate(ID = 1:resps) %>% dplyr::select(ID, everything())
return(result)
}
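# Hedged toy example (added for illustration, not part of the original script):
# sum_cover() expects the per-polygon data frame that exact_extract() returns when
# called with coverage_area = TRUE and summarize_df = TRUE, i.e. one row per covered
# raster value. The numbers below are made up purely to show what is computed.
toy_cover <- data.frame(value = c(0, 1), coverage_area = c(7500, 2500))
sum_cover(toy_cover)
# returns a list with one summary table: proportions 0.75 (value 0) and 0.25 (value 1);
# process_data() then keeps proportion[2], i.e. the share covered by value == 1.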
##### Load in GIS data ####
## Draw random points without incorporating actual pop density (old)
# random_points <- st_sample(germany_geo_county, size = resps)
# plot(germany_geo) + plot(random_points, add=T)
#### Draw respondents ####
# Load population density data for Germany
density_raster <- rast("Projects/ValuGaps/data/gis/BBSR_Landnutzung_Bevoelkerung/ewz250_2011_v1_multi.tif")
#### Draw cells from raster considering population density as probability weights ####
random_points <- spatSample(density_raster, resps, method="weights", replace= TRUE, as.points=TRUE)
# set replace to TRUE to sample with replacement
random_points_ls <- base::split(sample(random_points), 1:length(buffer_dist_ls))
# Draw Buffer around random points
#### Split samples ####
buffer_pts_ls <- list()
buffer_pts_ls <- lapply(seq_along(random_points_ls), function(i) {
buffered <- buffer(random_points_ls[[i]], buffer_dist_ls[[i]])
buffered_sf <- st_as_sf(buffered, crs = crs(hnv))
buffered_sf
})
buffer_pts <- do.call(rbind, buffer_pts_ls)
### Equalize CRSs to avoid a warning message; safe to do here as the CRSs are formally the same anyway
crs(hnv) <- crs(corine_hnv) <- crs(protected_areas) <- crs(corine_pa)
# Compute shares within Buffer with exact_extract and customized functions
hnv_shares <- process_data(hnv, "HNV", buffer_pts_ls)
hnv_possible <- process_data(corine_hnv, "HNV_possible", buffer_pts_ls)
protected_shares <- process_data(protected_areas, "protected", buffer_pts_ls)
protected_possible <- process_data(corine_pa, "Protected_possible", buffer_pts_ls)
# Merge protected area shares and HNV shares
land_cover_shares <- left_join(protected_shares, hnv_shares, by="ID")
possible_shares <- left_join(protected_possible, hnv_possible, by="ID")
final_set <- left_join(land_cover_shares, possible_shares, by="ID") %>%
mutate(HNV_on_corine = HNV/HNV_possible, Protected_on_corine = protected/Protected_possible,
buffer_dist_index = ceiling(row_number() / (resps / length(buffer_dist_ls))),
buffer_dist = map_dbl(buffer_dist_index, ~ buffer_dist_ls[[.]]),
SQ_HNV = (buffer_dist^2*HNV*pi)/10000,
SQ_Prot = (buffer_dist^2*protected*pi)/10000)
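# Hedged worked example (added for clarity, not in the original script): SQ_HNV turns
# the HNV share inside the buffer into a status-quo area in hectares: pi * buffer_dist^2
# is the buffer area in m^2, multiplying by the share and dividing by 10000 gives ha.
# E.g. with buffer_dist = 15000 m and an HNV share of 0.10:
# (15000^2 * 0.10 * pi) / 10000  # ~ 7068.6 ha of HNV in the status quo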
###################################################################################
#### Overview of GIS shares ####
# library(ggpubr)
# pa_pl <- ggplot(data=final_set) +
# geom_density(aes(x=Protected_on_corine), fill="lightblue") +
# xlab("Protected areas (% of possible)") +
# xlim(0:1)
#
# hnv_pl <- ggplot(data=final_set) +
# geom_density(aes(x=HNV_on_corine), fill="lightblue") +
# xlab("HNV (% of possible)") + xlim(0:1)
#
# pa_pos <- ggplot(data=final_set) +
# geom_density(aes(x=Protected_possible), fill="yellow") +
# xlab("Protected possible share") + xlim(0:1)
#
# hnv_pos <- ggplot(data=final_set) +
# geom_density(aes(x=HNV_possible), fill="yellow") +
# xlab("HNV possible share") + xlim(0:1)
#
# pa_sq <- ggplot(data=final_set) +
# geom_density(aes(x=SQ_Prot), fill="lightgreen") +
# xlab("Protected areas SQ (ha)") + xlim(0,40000)
#
# hnv_sq <- ggplot(data=final_set) +
# geom_density(aes(x=SQ_HNV), fill="lightgreen") +
# xlab("HNV SQ (ha)") + xlim(0,40000)
#
# ggarrange(pa_pl, hnv_pl, pa_pos, hnv_pos, pa_sq, hnv_sq, nrow = 3, ncol=2)
#
# ggsave("Figures/finalset_gis.png", dpi="print", width = 8, height = 6)
###################################################################################
# Do some time testing
tic()
crs(buffer_pts) <- crs(hnv)
hnv_shares <- terra::extract(hnv, buffer_pts, fun= mean, na.rm =T, weights = T)
toc()
sum_cover <- function(x){
list(x %>%
group_by(value) %>%
summarize(total_area = sum(coverage_area, na.rm=T)) %>%
mutate(proportion = total_area/sum(total_area[1:2])))
}
# Faster, but not correct for points at the edge
tic()
poly <- st_as_sf(buffer_pts, crs=crs(hnv))
test_share <- exact_extract(hnv, poly, fun=sum_cover, coverage_area=T, summarize_df=T)
test_share <- as.data.frame(sapply(test_share, function(x) ifelse(length(x$proportion) >= 2, x$proportion[2], 0)))
colnames(test_share) <- "HNV"
test_share <- test_share %>% mutate(ID = 1:resps)
toc()
check <- (hnv_shares$eea_r_3035_100_m_hnv.farmland.ac_2012_v1_r00 - test_share$HNV)
library(tidyverse)
library(tidylog)
library(apollo)
library(sf)
library(terra)
library(exactextractr)
library(ggspatial)
library(viridis)
#### 1 Define parameters and functions ####
buffer_dist = 15000 # distance we use in the DCE
### Summarizing function
sum_cover <- function(x){
list(x %>%
group_by(value) %>%
summarize(total_area = sum(coverage_area)) %>%
mutate(proportion = total_area/sum(total_area)))
}
#### 2 Load in model data ####
map_data <- readRDS("output/96_3runs_4designs_mixl_2023-08-14.RDS")
# Use estimates from first simulated model for example map
model1 <- as.data.frame(map_data$ort2attr[[1]]$estimate)
model1 <- rownames_to_column(model1)
colnames(model1) <- c("Parameter", "Estimate")
#### 3 Read in data for Germany with administrative borders ####
# https://gadm.org/download_country.html #
germany_county <- read_sf("data/gadm41_DEU_shp", "gadm41_DEU_2") # county borders
germany_gemeinde <- read_sf("data/gadm41_DEU_shp", "gadm41_DEU_3") # Gemeinde borders
#### 4 Read in CORINE Land cover data ####
corine <- rast("data/gis/corine_raster_2018/DATA/U2018_CLC2018_V2020_20u1.tif")
germany_geo <- st_transform(germany_county$geometry, crs=crs(corine)) # used to crop corine data to Germany
corine <- crop(corine, germany_geo)
corine <- mask(corine, vect(germany_geo))
levels(corine)
hnv_categories <- c(12, 14:22, 26:29, 32, 35:37) # see p.37 HNV EUROPE DATA Documentation
corine_hnv <- corine
corine_hnv[!(corine_hnv %in% hnv_categories)] <-NA # Make binary categories
corine_hnv <- ifel(is.na(corine_hnv), 0, 1)
corine_hnv <- mask(corine_hnv, vect(germany_geo))
#### 5 High Nature Value (HNV) ####
# Calculate share of HNV for each county
hnv <- rast("data/gis/hnv_germany.tif")
###### Overlap of HNV and hnv_corine ####
hnv_true <- hnv > 0
hnv_filtered <- mask(hnv, hnv_true, maskvalue=0)
hnv_on_corine <- intersect(corine_hnv, hnv_filtered)
hnv_on_corine <- mask(hnv_on_corine, vect(germany_geo)) #hnv only on the categories where we would expect it
corine_hnv_true <- corine_hnv > 0
corine_hnv_test <- mask(corine_hnv, corine_hnv_true, maskvalue=0)
germany_county <- st_transform(germany_county, crs(corine_hnv))
germany_gemeinde <- st_transform(germany_gemeinde, crs(corine_hnv))
##### Calculate actual and possible HNV share for all counties ######
counties <- germany_county$NAME_2
hnv_list <- list()
hnv_possible_list <- list()
z= 1
for (county in counties) {
temp_county <- germany_county %>% filter(NAME_2 == counties[z])
hnv_possible_temp <- as.data.frame(
exact_extract(corine_hnv, temp_county, coverage_area = TRUE, summarize_df = TRUE, fun = sum_cover)) # calculate possible area
hnv_possible_list[[county]] <- hnv_possible_temp
hnv_temp <- as.data.frame(
exact_extract(hnv, temp_county, coverage_area = TRUE, summarize_df = TRUE, fun = sum_cover)) # calculate HNV share on total area
hnv_list[[county]] <- hnv_temp
print(paste0(n_distinct(germany_county$NAME_2) - z, " remaining"))
z = z+1
}
hnv_shares_counties <- as.data.frame(sapply(hnv_list, function(x) x$proportion[2]))
hnv_shares_counties <- rownames_to_column(hnv_shares_counties)
hnv_shares_counties$possible <- as.numeric(sapply(hnv_possible_list, function(x) x$proportion[2]))
colnames(hnv_shares_counties) <- c("County", "HNV_share", "HNV_Possible")
hnv_shares_counties <- hnv_shares_counties %>% mutate(HNV_share = replace_na(HNV_share, 0),
HNV_on_corine = HNV_share/HNV_Possible)
##### check total HNV area in relation to Germany and Germany agricultural area ####
hnv_area <- as.data.frame(sapply(hnv_list, function(x) x$total_area[2]))
names(hnv_area) <- "Area"
hnv_area_sum <- hnv_area %>% summarize(Total_area = sum(Area, na.rm=T)/10000)
hnv_area_possible <- as.data.frame(sapply(hnv_possible_list, function(x) x$total_area[2]))
names(hnv_area_possible) <- "Area"
hnv_area_sum_possible <- hnv_area_possible %>% summarize(Total_area = sum(Area, na.rm=T)/10000)
share_germany_hnv <- as.numeric(hnv_area_sum)/as.numeric(hnv_area_sum_possible)
###### Calculate WTP based on share for HNV ####
wtp_hnv <- function(x) {-(model1$Estimate[[4]] + 2*model1$Estimate[5]*x)/model1$Estimate[6]}
hnv_shares_counties <- hnv_shares_counties %>% mutate(WTP_HNV = wtp_hnv(HNV_share*100))
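# Hedged derivation (added note): with a quadratic utility
# V = ... + b_lin * x + b_sq * x^2 + b_cost * p, the marginal WTP for a one-unit
# increase in x is -(dV/dx)/(dV/dp) = -(b_lin + 2 * b_sq * x) / b_cost, which is what
# wtp_hnv() evaluates at the county HNV share (in %). Purely illustrative numbers
# (the true-value coefficients used in the simulation, not the estimates in model1):
# b_lin = 1.4, b_sq = -0.0175, b_cost = -0.03, x = 10:
# -(1.4 + 2 * (-0.0175) * 10) / (-0.03)  # = 35, i.e. ~35 EUR per additional 1% HNV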
###### Create HNV map ####
# Merge county shapes with data frame
germany_county <- germany_county %>% rename("County" = NAME_2)
merge_hnv <- hnv_shares_counties %>% left_join(germany_county, by="County") %>%
select(County, HNV_share, WTP_HNV, geometry)
ggplot(data=germany_county) +
geom_sf(fill="gray98", alpha=1) +
geom_sf(data=merge_hnv, aes(fill=WTP_HNV, geometry=geometry), size=2) +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
labs(fill= "WTP (€)") +
scale_fill_gradientn(colours = c("cornsilk1", "royalblue3")) +
theme(plot.margin = unit(c(0.1,0.1,0.1,0.1), "cm")) +
ggtitle("WTP for a 1% increase in HNV")
ggsave("Figures/hnv_cont.png", width=6, height=5, dpi="print")
## discontinuous map ##
# quantile(merge_hnv$WTP_HNV)
# merge_hnv$cuts <- cut(merge_hnv$WTP_HNV, breaks = c(14, 22, 30, 38, 47))
#
#
# ggplot(data=germany_county) +
# geom_sf(fill="gray98") +
# geom_sf(data=merge_hnv, aes(fill=cuts, geometry=geometry), size=2) +
# annotation_scale(location = "bl", width_hint = 0.12,plot_unit="m", line_width = 0.5) +
# annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
# style = north_arrow_fancy_orienteering) +
# labs(fill= "WTP (€)") +
# scale_fill_manual(values = c("red3","orange", "gold1", "chartreuse3", "forestgreen"),
# labels = c("< -17", " -17 - 0", "0 - 23", "23 - 46", "46 - 64"))
#
# ggsave("Figures/hnv_disc.png", dpi="print", width=7, height=5)
#### 6 Protected Areas ####
#### Protected Areas and CORINE
protected_area_categories <- c(10, 12:41) # remove artificial areas except UGS and remove marine waters
corine_pa <- corine
corine_pa[!(corine_pa %in% protected_area_categories)] <-NA # Make binary categories
corine_pa <- ifel(is.na(corine_pa), 0, 1)
corine_pa <- mask(corine_pa, vect(germany_geo))
##### Load PA data ######
protected_areas_all <- readRDS("data/gis/germany_protected.rds")
categories_to_drop <- c(0,1,2,3, 4, 6, 8, 11) # define categories that should not be included in protected areas
protected_areas <- protected_areas_all
protected_areas[protected_areas %in% categories_to_drop] <-NA # Make binary categories
protected_areas <- ifel(is.na(protected_areas), 0, 1)
protected_areas <- mask(protected_areas, vect(germany_geo))
prot_list <- list()
prot_possible_list <- list()
z= 1
for (county in counties) {
temp_county <- germany_county %>% filter(County == counties[z])
prot_corine_temp <- as.data.frame(
exact_extract(corine_pa, temp_county, coverage_area = TRUE, summarize_df = TRUE, fun = sum_cover))
prot_possible_list[[county]] <- prot_corine_temp
prot_temp <- as.data.frame(
exact_extract(protected_areas, temp_county, coverage_area = TRUE, summarize_df = TRUE, fun = sum_cover))
prot_list[[county]] <- prot_temp
z = z+1
}
prot_counties <- as.data.frame(sapply(prot_list, function(x) x$proportion[2]))
prot_counties <- rownames_to_column(prot_counties)
prot_counties$Prot_Possible <- as.numeric(sapply(prot_possible_list, function(x) x$proportion[2]))
colnames(prot_counties) <- c("County", "Prot_share", "Prot_possible")
prot_counties <- prot_counties %>% mutate(Prot_share = replace_na(Prot_share , 0))
prot_counties <- prot_counties %>% mutate(Prot_share_on_corine = Prot_share/Prot_possible)
###### Calculate WTP based on share for Protected areas ####
wtp_prot <- function(x) {-(model1$Estimate[[2]] + 2*model1$Estimate[3]*x)/model1$Estimate[6]}
prot_counties <- prot_counties %>% mutate(WTP_Prot = wtp_prot(Prot_share*100))
merge_prot <- prot_counties %>% left_join(germany_county, by="County") %>%
select(County, Prot_share, WTP_Prot, geometry)
ggplot(data=germany_county) +
geom_sf(fill="gray98", alpha=1) +
geom_sf(data=merge_prot, aes(fill=WTP_Prot, geometry=geometry), size=2) +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
labs(fill= "WTP (€)") +
#scale_fill_viridis(option="C")
scale_fill_gradientn(colours = c("cornsilk1", "darkgreen")) +
theme(plot.margin = unit(c(0.1,0.1,0.1,0.1), "cm"))
ggsave("Figures/prot_cont.png",width=6, height=5, dpi="print")
# make map for Gemeinde protected
gemeinden <- germany_gemeinde$NAME_3
gemeinden_code <- germany_gemeinde$CC_3 # use the unique code since some Gemeinde names occur more than once
prot_list_g <- list()
z= 1
for (gemeinde in gemeinden) {
temp_county <- germany_gemeinde %>% filter(CC_3 == gemeinden_code[z])
prot_temp <- as.data.frame(
exact_extract(protected_areas, temp_county, coverage_area = TRUE, summarize_df = TRUE, fun = sum_cover))
prot_list_g[[gemeinde]] <- prot_temp
z = z+1
}
prot_gemeinde <- as.data.frame(sapply(prot_list_g, function(x) x$proportion[2]))
prot_gemeinde <- rownames_to_column(prot_gemeinde)
colnames(prot_gemeinde) <- c("Gemeinde", "Prot_share")
prot_gemeinde <- prot_gemeinde %>% mutate(Prot_share = replace_na(Prot_share , 0))
prot_gemeinde <- prot_gemeinde %>% mutate(WTP_Prot = wtp_prot(Prot_share*100))
germany_gemeinde <- germany_gemeinde %>% rename("Gemeinde" = NAME_3)
merge_prot_g <- prot_gemeinde %>% left_join(germany_gemeinde, by="Gemeinde") %>%
select(Gemeinde, Prot_share, WTP_Prot, geometry)
ggplot(data=germany_gemeinde) +
geom_sf(fill="gray98", alpha=1) +
geom_sf(data=merge_prot_g, aes(fill=WTP_Prot, geometry=geometry), size=2, col=NA) +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
labs(fill= "WTP (€)") +
#scale_fill_viridis(option="C")
scale_fill_gradientn(colours = c("indianred", "white", "darkseagreen", "darkgreen")) +
ggtitle("WTP for a 1% increase in PA Share") +
theme(plot.margin = unit(c(0.1,0.1,0.1,0.1), "cm"))
ggsave("Figures/prot_g_cont.png", dpi="print", width=6, height=5)
### Do plot for protected shares gemeinde ###
ggplot(data=merge_prot_g) +
geom_density(aes(x=Prot_share*100), col="darkseagreen", fill="darkseagreen", alpha=0.5) +
xlab("Share of Protected Areas (%)") +
ggtitle("Overview Protected Area Shares Gemeinden")
ggsave("Figures/prot_share_g.png", width = 7, height = 5, dpi="print")
# Do plot for HNV Share counties
ggplot(data=merge_hnv) +
geom_density(aes(x=HNV_share*100), col="royalblue", fill="royalblue", alpha=0.5) +
xlab("HNV Share (%)") +
ggtitle("Overview HNV Shares Counties")
ggsave("Figures/hnv_share.png", width = 7, height = 5, dpi="print")
### 7 Raster values WTP #####
##### Scaling ####
scale_factor <- pi*(buffer_dist/1000)^2 # Calculate total area that is valued in the DCE as scaling factor
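# Hedged note (added for clarity): pi * (buffer_dist / 1000)^2 is the buffer area in
# km^2, i.e. the area over which the DCE attribute changes were valued.
# With buffer_dist = 15000 m: pi * 15^2 ~ 706.9 km^2.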
###### Raster HNV (need to work on this further) ####
#inefficient as hell
agg_hnv <- aggregate(hnv, fact=10, fun=mean)
check_list <- c(unlist(values(agg_hnv)))
# try something, check this
agg_hnv_wtp <- -(model1$Estimate[[4]] + 2*model1$Estimate[5]*(check_list*100))/model1$Estimate[6]
values(agg_hnv) <- agg_hnv_wtp/scale_factor
agg_hnv_gg <- as.data.frame(agg_hnv, xy=TRUE)
colnames(agg_hnv_gg) <- c("x", "y", "WTP")
# geom_raster() only works sometimes here; geom_tile() works but does not look as nice
ggplot(data=agg_hnv_gg) +
geom_tile(aes(x = x, y = y, fill = WTP)) +
coord_quickmap() +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
#scale_fill_viridis(option="C", direction = 1)
scale_fill_gradientn(colours = c("gold", "cornsilk1", "royalblue3"))
ggsave("Figures/hnv_wtp_raster.png", width=6, height=5, dpi="print")
###### Raster protected ####
agg_prot <- aggregate(protected_areas, fact=10, fun=mean)
agg_prot <- mask(agg_prot, vect(germany_county))
values_vec <- c(unlist(values(agg_prot)))
prot_wtp <- -(model1$Estimate[[2]] + 2*model1$Estimate[3]*values_vec*100)/model1$Estimate[6]
values(agg_prot) <- prot_wtp
agg_prot_gg <- as.data.frame(agg_prot, xy=TRUE)
colnames(agg_prot_gg) <- c("x", "y", "WTP")
# Plot protected
ggplot(data=agg_prot_gg) +
geom_raster(aes(x = x, y = y, fill = WTP)) +
coord_equal() +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
#scale_fill_viridis(option="C", direction = 1)
scale_fill_gradientn(colours = c("cornsilk", "white", "darkseagreen", "darkgreen"))
ggsave("Figures/prot_wtp_raster.png", width=6, height=5, dpi="print")
#### 8 Load in Pop density data ####
density_raster <- rast("data/gis/BBSR_Landnutzung_Bevoelkerung/ewz250_2011_v1_multi.tif")
dens_rast_1000 <- aggregate(density_raster, fact=4, fun=sum)
dens_gg <- as.data.frame(dens_rast_1000, xy=TRUE)
colnames(dens_gg) <- c("x", "y", "Density")
ggplot(data=dens_gg) +
geom_tile(aes(x = x, y = y, fill = Density)) +
coord_equal() +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
scale_fill_viridis(option="C", direction = 1)
#### Calculate people within circle for cell centroids, a lot of work to do here
library(raster)
# subset for testing code
#subset_extent <- ext(4330000, 4380000, 2780000, 2830000)
#hnv_sub <- crop(agg_hnv, subset_extent)
hnv_sub <- aggregate(hnv, fact=50, fun=mean) # 5km x 5km cells
hnv_sub_gg <- as.data.frame(hnv_sub, xy=TRUE)
hnv_sub_gg <- rowid_to_column(hnv_sub_gg, var = "ID")
hnv_to_merge <- hnv_sub_gg %>% dplyr::select(ID, "HNV_share" = "eea_r_3035_100_m_hnv-farmland-ac_2012_v1_r00")
#### 9 Calculate WTP per cell ####
check_list <- c(unlist(values(hnv_sub)))
agg_hnv_wtp <- -(model1$Estimate[[4]] + 2*model1$Estimate[5]*(check_list*100))/model1$Estimate[6]
values(hnv_sub) <- agg_hnv_wtp/scale_factor
# transform density raster to same resolution #
dens_sub <- aggregate(density_raster, fact=20, fun=sum) # 5km x 5km cells
grid <- rast(hnv_sub, vals = values(hnv_sub)) # empty raster
grid_points <- as.points(grid, values=TRUE, na.rm=TRUE, na.all=FALSE)
grid_points_gg <- as.data.frame(grid, xy=TRUE)
gridPoints <- SpatialPoints(grid_points_gg)
vec_gP <- vect(gridPoints)
crs(vec_gP) <- crs(dens_sub)
buffer_pts <- buffer(vec_gP, buffer_dist)
# Compute people within Buffer
density_in_buffer <- extract(dens_sub, buffer_pts, fun= sum, na.rm =T, weights = TRUE)
# Check if this is really correct with the ID
grid_points_gg <- rowid_to_column(grid_points_gg, var = "ID")
grid_merge <- left_join(grid_points_gg, density_in_buffer, by="ID")
colnames(grid_merge) <- c("ID", "x", "y", "marg_WTP", "Density")
grid_merge <- left_join(grid_merge, hnv_to_merge, by="ID")
## calculate WTP for a 1 ha increase; produces infinite values if SQ = 0 ##
#### work on here ### with the current design a 1% increase always refers to the same area and does not depend on the SQ
# need to define this clearly for the DCE #
# For now it is assumed that we value a 1% increase of HNV on the total land area, which is always the same area
# Aggregated WTP means the aggregated WTP for a one-hectare increase of HNV in the raster cell
grid_merge <- grid_merge %>% mutate(SQ_HNV = (HNV_share * unique(res(hnv_sub))^2)/10000,
agg_WTP = marg_WTP*Density/(((unique(res(hnv_sub))^2)/10000)*0.01))
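# Hedged worked example (added note): with the fact = 50 aggregation above, the cells
# of hnv_sub are 5000 m x 5000 m, so unique(res(hnv_sub))^2 / 10000 = 2500 ha per cell
# and a 1% increase corresponds to 25 ha:
# (5000^2 / 10000) * 0.01  # = 25 ha
# agg_WTP therefore multiplies the per-person marginal WTP by the population in the
# buffer and expresses the result per hectare of additional HNV.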
###
ggplot(data=grid_merge) +
geom_tile(aes(x = x, y = y, fill = agg_WTP)) +
coord_equal() +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
scale_fill_viridis(option="C", direction = 1)
ggsave("Figures/agg_wtp_example.png", dpi="print", width = 7, height = 5)
# discontinuous map with cuts
grid_merge$cuts <- cut(grid_merge$agg_WTP, c(-1000, 0, 100, 200, 350, 1000, 3000, 5000, 8600))
ggplot(data=grid_merge) +
geom_tile(aes(x = x, y = y, fill = cuts)) +
coord_equal() +
annotation_scale(location = "bl", width_hint = 0.1,plot_unit="m", line_width = 0.1) +
annotation_north_arrow(location = "br" ,pad_x = unit(0, "in"), pad_y = unit(0, "in"),
style = north_arrow_fancy_orienteering) +
labs(fill="Aggregated WTP (€/ha)") +
xlab("") +
ylab("") +
scale_fill_viridis(option="C", direction = 1, discrete = T,
labels=c("< 0", "0 - 100", "100 - 200", "200 - 350", "350 - 1,000", "1,000 - 3,000", "3,000 - 5,000",
" > 5,000"))
ggsave("Figures/agg_wtp_hnv_ex_disc.png", dpi="print", width=6, height = 5)
? Orthogonal Design
design
;alts = alt1*, alt2*, alt3
;rows = 72
;block = 6
;orth = sim
;con
;model:
U(alt1) = b1 + b2 * B[0,1,2,3] + b3 * C[0,1,2,3] + b5 * P[40,80,120,160]/
U(alt2) = b1 + b2 * B + b3 * C + b5 * P
;formatTitle = 'Scenario <scenarionumber> Block <blocknumber>'
;formatTableDimensions = 3, 5
;formatTable:
1,1 = '' /
1,2 = 'Share of protected areas' /
1,3 = 'Share of HNV' /
1,4 = 'Cost per year' /
1,5 = 'Choice question&:' /
2,1 = 'alt1' /
2,2 = '<alt1.b>' /
2,3 = '<alt1.c>' /
2,4 = '<alt1.p>' /
2,5 = '' /
3,1 = 'alt2' /
3,2 = '<alt2.b>' /
3,3 = '<alt2.c>' /
3,4 = '<alt2.p>' /
3,5 = ''
;formatTableStyle:
1,1 = 'default' /
1,2 = 'headingattribute' /
1,3 = 'headingattribute' /
1,4 = 'headingattribute' /
1,5 = 'headingattribute' /
2,1 = 'heading1' /
2,2 = 'body1' /
2,3 = 'body1' /
2,4 = 'body1' /
2,5 = 'choice1' /
3,1 = 'heading2' /
3,2 = 'body2' /
3,3 = 'body2' /
3,4 = 'body2' /
3,5 = 'choice2'
;formatStyleSheet = Default.css
;formatAttributes:
alt1.b(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt1.c(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt1.p(40=#, 80=#, 120=#, 160=#) /
alt2.b(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt2.c(0=#0% more, 1=#0% more, 2=#0% more, 3=#0% more) /
alt2.p(40=#, 80=#, 120=#, 160=#)
$
??? Bayesian Efficient Design 1
design
;alts = alt1*, alt2*, alt3
;rows = 36
;block = 3
;eff = (mnl,d,mean)
;rep = 1000
;bdraws = halton(1000)
;con
;model:
U(alt1) = b1[(n,-1.2,0.1)] + b2[(n,0.1,0.02)] * B[0,1] + b3[(n,0.4,0.1)] * C[0,1] + b4[(n,0.3,0.1)] * D[0,1] + b5[(n,0.02,0.001)] * P[35:83:6]/
U(alt2) = b6[(n,-1.4,0.1)] + b2 * B + b3 * C + b4 * D + b5 * P
?U(alt3) =
;formatTitle = 'Scenario <scenarionumber>'
;formatTableDimensions = 3, 6
;formatTable:
1,1 = '' /
1,2 = 'Results' /
1,3 = 'Beratung' /
1,4 = 'Partner' /
1,5 = 'Kompensation' /
1,6 = 'Choice question&:' /
2,1 = 'alt1' /
2,2 = '<alt1.b>' /
2,3 = '<alt1.c>' /
2,4 = '<alt1.d>' /
2,5 = '<alt1.p>' /
2,6 = '' /
3,1 = 'alt2' /
3,2 = '<alt2.b>' /
3,3 = '<alt2.c>' /
3,4 = '<alt2.d>' /
3,5 = '<alt2.p>' /
3,6 = ''
;formatTableStyle:
1,1 = 'default' /
1,2 = 'headingattribute' /
1,3 = 'headingattribute' /
1,4 = 'headingattribute' /
1,5 = 'headingattribute' /
1,6 = 'headingattribute' /
2,1 = 'heading1' /
2,2 = 'body1' /
2,3 = 'body1' /
2,4 = 'body1' /
2,5 = 'body1' /
2,6 = 'choice1' /
3,1 = 'heading2' /
3,2 = 'body2' /
3,3 = 'body2' /
3,4 = 'body2' /
3,5 = 'body2' /
3,6 = 'choice2'
;formatStyleSheet = Default.css
;formatAttributes:
alt1.b(0=Results, 1=Action) /
alt1.c(0=Keine Beratung, 1=Mit Beratung) /
alt1.d(0=Keine Partner, 1=Mit Partner) /
alt1.p(35=#0, 41=#0, 47=#0, 53=#0, 59=#0, 65=#0, 71=#0, 77=#0, 83=#0) /
alt2.b(0=Results, 1=Action) /
alt2.c(0=Keine Beratung, 1=Mit Beratung) /
alt2.d(0=Keine Partner, 1=Mit Partner) /
alt2.p(35=#0, 41=#0, 47=#0, 53=#0, 59=#0, 65=#0, 71=#0, 77=#0, 83=#0)
$
?Efficient Design
design
;alts = alt1*, alt2*, alt3
;rows = 36
;block = 3
;eff = (mnl,d)
;rep = 1000
;con
;model:
U(alt1) = b1[-1.2] + bPROT[0.1] * B[0,10,20] + bHNV[0.1] * C[0,10,20] + bPROTACC[0.2] * D[0,1,2] + bHNVACC[0.1] * E[0,1] +bPRICE[-0.02] *P[40,80,120,160,200,300]/
U(alt2) = b1 + bPROT * B + bHNV * C + bPROTACC * D + bHNVACC * E +bPRICE *P[40,80,120,160,200,300]
?U(alt3) =
$
?Bad prior Design
design
;alts = alt1*, alt2*, alt3
;rows = 18
;block = 2
;eff = (mnl,d)
;rep = 1000
?;con
;model:
U(alt1) = b1[-10] + b2[9] * B[0,1] + b3[4] * C[0,1] + b4[0.3] * D[0,1] + b5[0.02] * P[35:83:6]/
U(alt2) = b6[-1.4] + b2 * B + b3 * C + b4 * D + b5 * P
;formatTitle = 'Scenario <scenarionumber>'
;formatTableDimensions = 3, 6
;formatTable:
1,1 = '' /
1,2 = 'Results' /
1,3 = 'Beratung' /
1,4 = 'Partner' /
1,5 = 'Kompensation' /
1,6 = 'Choice question&:' /
2,1 = 'alt1' /
2,2 = '<alt1.b>' /
2,3 = '<alt1.c>' /
2,4 = '<alt1.d>' /
2,5 = '<alt1.p>' /
2,6 = '' /
3,1 = 'alt2' /
3,2 = '<alt2.b>' /
3,3 = '<alt2.c>' /
3,4 = '<alt2.d>' /
3,5 = '<alt2.p>' /
3,6 = ''
;formatTableStyle:
1,1 = 'default' /
1,2 = 'headingattribute' /
1,3 = 'headingattribute' /
1,4 = 'headingattribute' /
1,5 = 'headingattribute' /
1,6 = 'headingattribute' /
2,1 = 'heading1' /
2,2 = 'body1' /
2,3 = 'body1' /
2,4 = 'body1' /
2,5 = 'body1' /
2,6 = 'choice1' /
3,1 = 'heading2' /
3,2 = 'body2' /
3,3 = 'body2' /
3,4 = 'body2' /
3,5 = 'body2' /
3,6 = 'choice2'
;formatStyleSheet = Default.css
;formatAttributes:
alt1.b(0=Results, 1=Action) /
alt1.c(0=Keine Beratung, 1=Mit Beratung) /
alt1.d(0=Keine Partner, 1=Mit Partner) /
alt1.p(35=#0, 41=#0, 47=#0, 53=#0, 59=#0, 65=#0, 71=#0, 77=#0, 83=#0) /
alt2.b(0=Results, 1=Action) /
alt2.c(0=Keine Beratung, 1=Mit Beratung) /
alt2.d(0=Keine Partner, 1=Mit Partner) /
alt2.p(35=#0, 41=#0, 47=#0, 53=#0, 59=#0, 65=#0, 71=#0, 77=#0, 83=#0)
$
library(raster)
library(terra)
library(sf)
library("rnaturalearth")
library("rnaturalearthdata")
library("exactextractr")
designpath<- "Projects/ValuGaps/Designs/"
# Load in HNV raster data (preprocessed)
hnv <- rast("Projects/ValuGaps/data/gis/hnv_germany.tif")
# Load BFN protected areas data
protected_areas <- readRDS("Projects/ValuGaps/data/gis/protected_bfn_raster.rds")
##############################################################################################
# Load in protected areas data (preprocessed; Protected Planet data, comment out if using the BfN data)
# protected_areas_all <- readRDS("Projects/ValuGaps/data/gis/germany_protected.rds")
# levels(protected_areas_all)
#
# categories_to_drop <- c(1,3, 4, 6, 8, 9, 11) # define categories that should not be included in protected areas
# # keeps nationalparks, nature conservation areas and UNESCO biosphere reserve
#
# protected_areas <- protected_areas_all
# protected_areas[protected_areas %in% categories_to_drop] <-NA # Make binary categories
#
# names(protected_areas) <- "value"
##############################################################################################
# Code protected areas as zero/one
protected_areas <- ifel(is.na(protected_areas), 0, 1)
# Load in shapefile of germany
germany_sf <- read_sf("Projects/ValuGaps/data/gis/WDPA_WDOECM_Feb2023_Public_DEU_shp/gadm41_DEU_shp", "gadm41_DEU_0") # no county borders
germany_sf_county <- read_sf("Projects/ValuGaps/data/gis/WDPA_WDOECM_Feb2023_Public_DEU_shp/gadm41_DEU_shp", "gadm41_DEU_2")
germany_geo <- st_transform(germany_sf$geometry, crs=crs(protected_areas)) # transform to raster crs
germany_geo_county <- st_transform(germany_sf_county$geometry, crs=crs(protected_areas))
#### Read in CORINE Land cover data ####
corine <- rast("Projects/ValuGaps/data/gis/corine_raster_2018/DATA/U2018_CLC2018_V2020_20u1.tif")
germany_geo_c <- st_transform(germany_sf_county$geometry, crs=crs(corine)) # used to crop corine data to Germany
corine <- crop(corine, germany_geo_c)
corine <- mask(corine, vect(germany_geo_c))
levels(corine)
hnv_categories <- c(12, 14:22, 26:29, 32, 35:37) # see p.37 HNV EUROPE DATA Documentation
corine_hnv <- corine
corine_hnv[!(corine_hnv %in% hnv_categories)] <-NA # Make binary categories
corine_hnv <- ifel(is.na(corine_hnv), 0, 1)
corine_hnv <- mask(corine_hnv, vect(germany_geo_c))
corine_hnv <- mask(corine_hnv, hnv)
#### Protected areas CORINE
protected_area_categories <- c(10, 12:41) # remove artificial areas except UGS and remove marine waters
corine_pa <- corine
corine_pa[!(corine_pa %in% protected_area_categories)] <-NA # Make binary categories
corine_pa <- ifel(is.na(corine_pa), 0, 1)
corine_pa <- mask(corine_pa, vect(germany_geo_c))
protected_areas <- mask(protected_areas, vect(germany_geo_c))
####
#### Define simulation parameters ####
resps = 96 # number of respondents
nosim=1 # number of simulations to run (about 500 is minimum)
#buffer_dist <- 15000
buffer_dist_ls <- list(10000, 15000, 25000, 50000) # define radius for the respondents in meter
# beta coefficients should not include "-"
basc = 0.5
bb = 1.2
bb2 = -0.016
bc= 1.4
bc2 = -0.0175
bp=-0.03
mani1 = expr(left_join(final_set, by="ID"))
manipulations = list(
#expr(across(c("HNV_on_corine", "Protected_on_corine"), ~replace_na(.,0)) ),
alt1.protected = expr(alt1.b+Protected_on_corine*100),
alt2.protected = expr(alt2.b+Protected_on_corine*100),
alt3.protected = expr(Protected_on_corine*100),
alt1.HNV = expr(alt1.b+HNV_on_corine*100),
alt2.HNV = expr(alt2.b+HNV_on_corine*100),
alt3.HNV = expr(HNV_on_corine*100),
expr(across(.cols=matches("HNV|protected"),.fns = ~.x^2, .names = "{.col}{'sq'}" ) )
)
#place your utility functions here
u<-list( u1= list(
v1 =V.1~ basc + bb * alt1.protected + bb2 * alt1.protectedsq + bc * alt1.HNV + bc2 * alt1.HNVsq + bp * alt1.p ,
v2 =V.2~ basc + bb * alt2.protected + bb2 * alt2.protectedsq + bc * alt2.HNV + bc2 * alt2.HNVsq + bp * alt2.p ,
v3 =V.3~ bb * alt3.protected + bb2 * alt3.protectedsq + bc * alt3.HNV + bc2 * alt3.HNVsq )
)
## this function will be called before the data is simulated
sou_gis <- function() {
source("Projects/ValuGaps/code/GIS_data.R")
}
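# Hedged sanity check (added, not part of the original parameter file; helper names are
# hypothetical): the true marginal WTP implied by the coefficients above is
# -(bb + 2 * bb2 * x) / bp for a protected-area share of x percentage points and
# -(bc + 2 * bc2 * x) / bp for HNV, mirroring the wtp_prot()/wtp_hnv() functions used
# when mapping the estimated models.
true_wtp_prot <- function(x) -(bb + 2 * bb2 * x) / bp
true_wtp_hnv  <- function(x) -(bc + 2 * bc2 * x) / bp
# e.g. true_wtp_prot(10) ~ 29.3 and true_wtp_hnv(10) = 35 (EUR per additional 1%)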
Design Choice situation opt1.cow opt1.adv opt1.vet opt1.far opt1.met opt1.bon opt2.cow opt2.adv opt2.vet opt2.far opt2.met opt2.bon Block
1 1 1 0 1 0 1 5 0 1 1 1 3 3 2
1 2 1 1 1 1 0 2 1 0 0 0 3 7 2
1 3 0 1 0 1 2 7 1 0 0 0 0 6 1
1 4 0 1 0 0 1 4 0 0 1 1 2 5 1
1 5 1 1 1 0 0 4 0 0 0 1 1 2 2
1 6 0 0 1 1 2 6 1 1 0 0 0 5 2
1 7 0 0 0 0 2 3 1 1 1 1 1 0 2
1 8 0 0 1 0 0 7 1 1 0 1 2 4 1
1 9 1 1 1 0 3 6 0 0 1 1 1 4 1
1 10 0 1 0 1 0 0 1 0 1 0 3 1 1
1 11 1 0 1 0 2 0 0 1 0 1 0 6 1
1 12 1 0 1 1 1 3 0 1 1 0 2 1 1
1 13 0 0 0 1 3 1 1 1 0 0 2 2 1
1 14 1 1 0 1 1 1 0 0 1 0 3 0 2
1 15 0 1 0 0 3 5 1 0 0 1 0 7 2
1 16 1 0 0 1 3 2 0 1 1 0 1 3 2
||||||||||
design
;alts = opt1*, opt2*, opt3
;eff = (mnl, d)
;alg = swap
;rows = 16
;block = 2
;model:
U(opt1) = b1[0.2] * COW[0,1]
+ b2[0.2] * ADV[0,1]
+ b3[0.2] * VET[0,1]
+ b4[0.2] * FAR[0,1]
+ b5.dummy[0.1|0.2|0.3] * MET[1,2,3,0]
+ b6.dummy[0.3|0.5|0.65|0.75|0.8|0.83|0.85] * BON[1,2,3,4,5,6,7,0]
+ i1[0] * COW.dummy[0] * VET.dummy[1]
/
U(opt2) = b1 * COW
+ b2 * ADV
+ b3 * VET
+ b4 * FAR
+ b5 * MET
+ b6 * BON
+ i1 * COW.dummy[0] * VET.dummy[1]
/
U(opt3) = asc3[0.2]
;
$
\ No newline at end of file
Design Choice situation opt1.cow opt1.adv opt1.vet opt1.far opt1.met opt1.bon opt2.cow opt2.adv opt2.vet opt2.far opt2.met opt2.bon Block
1 1 1 1 1 1 1 1 1 0 0 0 3 7 2
1 2 1 1 1 0 2 6 0 0 1 1 2 1 2
1 3 0 1 0 1 3 0 0 0 1 0 0 4 2
1 4 1 0 1 0 1 2 1 1 0 1 2 7 1
1 5 0 0 0 0 1 3 1 1 0 1 3 2 2
1 6 1 1 1 1 0 5 0 0 1 0 4 3 1
1 7 0 1 0 1 4 2 1 0 1 0 0 6 1
1 8 0 0 0 0 2 7 0 1 1 1 2 0 2
1 9 0 0 1 1 3 6 1 1 0 0 1 2 2
1 10 1 0 0 0 4 3 0 1 1 1 0 6 1
1 11 1 0 0 1 0 4 0 1 0 0 4 5 1
1 12 1 1 0 0 0 4 1 0 1 1 4 4 2
1 13 0 1 1 0 3 5 1 0 0 1 1 3 1
1 14 0 1 1 0 2 0 0 0 0 1 1 0 1
1 15 0 0 0 1 0 7 1 1 1 0 3 1 1
1 16 1 0 1 1 4 1 0 1 0 0 0 5 2
||||||||||
design
;alts = opt1*, opt2*, opt3
;eff = (mnl, d)
;alg = swap
;rows = 16
;block = 2
;model:
U(opt1) = b1[0.2] * COW[0,1]
+ b2[0.2] * ADV[0,1]
+ b3[0.2] * VET[0,1]
+ b4[0.2] * FAR[0,1]
+ b5[0.1] * MET[0,1,2,3,4]
+ b6[0.2] * BON[0,1,2,3,4,5,6,7]
+ i1[0] * COW.dummy[0] * VET.dummy[1]
/
U(opt2) = b1 * COW
+ b2 * ADV
+ b3 * VET
+ b4 * FAR
+ b5 * MET
+ b6 * BON
+ i1 * COW.dummy[0] * VET.dummy[1]
/
U(opt3) = asc3[0.2]
;
$
\ No newline at end of file
Design Choice situation opt1.cow opt1.adv opt1.vet opt1.far opt1.met opt1.bon opt2.cow opt2.adv opt2.vet opt2.far opt2.met opt2.bon Block
1 1 0 0 0 0 0 1 1 0 1 0 3 0 8
1 2 0 0 0 0 1 6 0 1 1 1 0 5 1
1 3 0 0 0 0 2 1 0 1 0 0 0 0 4
1 4 0 0 0 0 3 6 0 0 0 0 2 1 5
1 5 0 1 0 0 3 3 0 1 1 1 1 2 5
1 6 0 1 0 0 2 0 0 1 1 1 2 5 3
1 7 0 1 0 0 1 3 1 0 0 0 2 4 1
1 8 0 1 0 0 0 0 0 0 0 0 1 6 6
1 9 1 0 0 0 3 7 0 0 0 1 2 3 2
1 10 1 0 0 0 2 4 0 0 1 1 3 7 8
1 11 1 0 0 0 1 7 0 0 0 1 1 0 4
1 12 1 0 0 0 0 4 0 1 1 0 3 4 4
1 13 1 1 0 0 0 5 0 0 1 0 2 2 3
1 14 1 1 0 0 1 2 1 0 1 1 3 6 2
1 15 1 1 0 0 2 5 1 0 1 1 0 1 5
1 16 1 1 0 0 3 2 1 0 0 1 2 2 7
1 17 0 0 1 0 0 2 1 0 1 1 1 6 4
1 18 0 0 1 0 1 5 0 1 1 0 0 7 1
1 19 0 0 1 0 2 2 0 1 0 0 3 3 7
1 20 0 0 1 0 3 5 1 0 0 1 3 5 2
1 21 0 1 1 0 3 4 1 1 1 0 3 1 5
1 22 0 1 1 0 2 7 1 0 1 0 0 3 1
1 23 0 1 1 0 1 4 0 0 0 0 0 1 5
1 24 0 1 1 0 0 7 0 1 1 1 3 2 8
1 25 1 0 1 0 3 0 1 0 1 0 2 3 4
1 26 1 0 1 0 2 3 1 0 0 1 1 5 6
1 27 1 0 1 0 1 0 1 0 0 0 0 4 4
1 28 1 0 1 0 0 3 0 1 0 0 1 3 6
1 29 1 1 1 0 0 6 1 1 0 0 3 2 6
1 30 1 1 1 0 1 1 0 0 1 0 3 5 7
1 31 1 1 1 0 2 6 1 0 0 0 1 7 3
1 32 1 1 1 0 3 1 1 1 1 1 2 0 7
1 33 0 0 0 1 3 0 1 1 1 1 3 3 8
1 34 0 0 0 1 2 3 0 0 1 1 1 7 5
1 35 0 0 0 1 1 0 0 0 1 1 2 4 3
1 36 0 0 0 1 0 3 1 1 0 1 3 4 2
1 37 0 1 0 1 0 6 0 0 0 0 3 6 8
1 38 0 1 0 1 1 1 0 0 1 0 1 5 6
1 39 0 1 0 1 2 6 1 0 1 1 2 1 2
1 40 0 1 0 1 3 1 1 1 1 0 0 6 8
1 41 1 0 0 1 0 2 0 1 0 1 2 6 5
1 42 1 0 0 1 1 5 1 1 0 1 0 7 7
1 43 1 0 0 1 2 2 0 1 1 0 2 7 1
1 44 1 0 0 1 3 5 1 1 1 0 1 1 8
1 45 1 1 0 1 3 4 0 1 1 0 1 4 3
1 46 1 1 0 1 2 7 0 0 0 1 3 0 4
1 47 1 1 0 1 1 4 1 0 1 0 1 0 7
1 48 1 1 0 1 0 7 0 0 1 0 0 2 3
1 49 0 0 1 1 3 7 1 1 1 0 2 6 3
1 50 0 0 1 1 2 4 1 0 0 0 3 7 1
1 51 0 0 1 1 1 7 0 1 0 1 0 6 7
1 52 0 0 1 1 0 4 1 1 0 0 1 2 4
1 53 0 1 1 1 0 5 1 1 0 0 2 5 5
1 54 0 1 1 1 1 2 0 1 0 1 1 1 2
1 55 0 1 1 1 2 5 0 0 1 1 0 4 8
1 56 0 1 1 1 3 2 1 1 0 1 1 4 6
1 57 1 0 1 1 0 1 1 0 0 1 0 2 3
1 58 1 0 1 1 1 6 1 1 0 0 0 5 6
1 59 1 0 1 1 2 1 1 1 1 1 1 3 1
1 60 1 0 1 1 3 6 0 0 0 1 0 3 6
1 61 1 1 1 1 3 3 1 1 1 1 0 0 2
1 62 1 1 1 1 2 0 0 1 0 0 2 0 1
1 63 1 1 1 1 1 3 0 1 0 1 3 1 2
1 64 1 1 1 1 0 0 1 1 0 1 2 7 7
||||||||||
design
;alts = opt1*, opt2*, opt3
;orth = seq
;rows = 64
;block = 8
;model:
U(opt1) = b1[0.2] * COW[0,1]
+ b2[0.2] * ADV[0,1]
+ b3[0.2] * VET[0,1]
+ b4[0.2] * FAR[0,1]
+ b5.dummy[0.1|0.2|0.3] * MET[1,2,3,0]
+ b6.dummy[0.3|0.5|0.65|0.75|0.8|0.83|0.85] * BON[1,2,3,4,5,6,7,0]
+ i1[0] * COW.dummy[0] * VET.dummy[1]
/
U(opt2) = b1 * COW
+ b2 * ADV
+ b3 * VET
+ b4 * FAR
+ b5 * MET
+ b6 * BON
+ i1 * COW.dummy[0] * VET.dummy[1]
/
U(opt3) = asc3[0.2]
;
$
\ No newline at end of file