F2023 tokenize (second form)
parent 84af516cdb
commit c4d061ba0a
src/IO.f90 (78 changed lines)
@@ -48,7 +48,8 @@ implicit none(type,external)
     IO_color, &
     IO_error, &
     IO_warning, &
-    IO_STDOUT
+    IO_STDOUT, &
+    tokenize
 
 contains
 
@@ -742,6 +743,33 @@ pure function CRLF2LF(str)
 end function CRLF2LF
 
 
+!--------------------------------------------------------------------------------------------------
+!> @brief Fortran 2023 tokenize (first form).
+!--------------------------------------------------------------------------------------------------
+pure subroutine tokenize(string,set,tokens)
+
+  character(len=*), intent(in) :: string, set
+  character(len=:), dimension(:), allocatable, intent(out) :: tokens
+
+  integer, allocatable, dimension(:,:) :: pos
+  integer :: i, s, e
+
+
+  allocate(pos(2,0))
+  e = 0
+  do while (e < verify(string,set,back=.true.))
+    s = e + merge(verify(string(e+1:),set),1,scan(string(e+1:),set)/=0)
+    e = s + merge(scan(string(s:),set)-2,len(string(s:))-1,scan(string(s:),set)/=0)
+    pos = reshape([pos,[s,e]],[2,size(pos)/2+1])
+  end do
+  allocate(character(len=merge(maxval(pos(2,:)-pos(1,:))+1,0,size(pos)>0))::tokens(size(pos,2)))
+  do i = 1, size(pos,2)
+    tokens(i) = string(pos(1,i):pos(2,i))
+  end do
+
+end subroutine tokenize
+
+
 !--------------------------------------------------------------------------------------------------
 !> @brief Write statements to standard error.
 !--------------------------------------------------------------------------------------------------
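The polyfill drives the whole scan with two intrinsics: verify(string,set,back=.true.) gives the position of the last non-separator character (the loop bound), verify(string(e+1:),set) locates the start of the next token, and scan(string(s:),set) locates the separator that ends it; the merge calls handle a token that runs to the end of the string. Runs of adjacent separators are skipped, and the result array is blank-padded to the length of the longest token. A minimal standalone driver, hypothetical and not part of the commit, embedding an unchanged copy of the subroutine to show this behavior:

program tokenize_demo
  implicit none

  character(len=:), dimension(:), allocatable :: tokens
  integer :: i

  call tokenize('#aa@@bbb!!!cccc#','#@!',tokens)            ! any of '#', '@', '!' separates
  do i = 1, size(tokens)
    print '(a)', '"'//tokens(i)//'"'                        ! "aa  ", "bbb ", "cccc"
  end do

contains

  ! verbatim copy of the polyfill from the commit above
  pure subroutine tokenize(string,set,tokens)
    character(len=*), intent(in) :: string, set
    character(len=:), dimension(:), allocatable, intent(out) :: tokens

    integer, allocatable, dimension(:,:) :: pos
    integer :: i, s, e

    allocate(pos(2,0))
    e = 0
    do while (e < verify(string,set,back=.true.))           ! loop until past the last non-separator
      s = e + merge(verify(string(e+1:),set),1,scan(string(e+1:),set)/=0)
      e = s + merge(scan(string(s:),set)-2,len(string(s:))-1,scan(string(s:),set)/=0)
      pos = reshape([pos,[s,e]],[2,size(pos)/2+1])          ! append [start,end] of this token
    end do
    allocate(character(len=merge(maxval(pos(2,:)-pos(1,:))+1,0,size(pos)>0))::tokens(size(pos,2)))
    do i = 1, size(pos,2)
      tokens(i) = string(pos(1,i):pos(2,i))
    end do
  end subroutine tokenize

end program tokenize_demo

Compiled with any recent Fortran compiler (e.g. gfortran tokenize_demo.f90), it prints "aa  ", "bbb ", and "cccc", each padded to the longest token length of 4. Presumably the wrapper mirrors the intrinsic's first-form signature so that call sites stay unchanged once compilers provide tokenize natively.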
@@ -808,6 +836,7 @@ subroutine IO_selfTest()
 
   integer, dimension(:), allocatable :: chunkPos
   character(len=:), allocatable :: str,out
+  character(len=:), dimension(:), allocatable :: tokens
 
 
   if (dNeq(1.0_pREAL, IO_strAsReal('1.0'))) error stop 'IO_strAsReal'
@@ -887,6 +916,53 @@ subroutine IO_selfTest()
   if ('abc,'//IO_EOL//'xxdefg,'//IO_EOL//'xxhij' /= IO_wrapLines('abc,defg, hij',filler='xx',length=4)) &
     error stop 'IO_wrapLines/7'
 
+  call tokenize('','$',tokens)
+  if (size(tokens) /= 0 .or. len(tokens) /= 0) error stop 'tokenize empty'
+  call tokenize('abcd','dcba',tokens)
+  if (size(tokens) /= 0 .or. len(tokens) /= 0) error stop 'tokenize empty'
+
+  tokens = ['a']
+  call test_tokenize('a','#',tokens)
+  call test_tokenize('#a','#',tokens)
+  call test_tokenize('a#','#',tokens)
+
+  tokens = ['aa']
+  call test_tokenize('aa','#',tokens)
+  call test_tokenize('$aa','$',tokens)
+  call test_tokenize('aa$','$',tokens)
+
+  tokens = ['a','b']
+  call test_tokenize('a$b','$',tokens)
+  call test_tokenize('@a@$b@','$@',tokens)
+
+  tokens = ['aa','bb']
+  call test_tokenize('aa$bb','$',tokens)
+  call test_tokenize('aa$$bb','$',tokens)
+  call test_tokenize('aa$bb$','$',tokens)
+
+  tokens = ['aa  ','bbb ','cccc']
+  call test_tokenize('aa$bbb$cccc','$',tokens)
+  call test_tokenize('$aa$bbb$cccc$','$',tokens)
+  call test_tokenize('#aa@@bbb!!!cccc#','#@!',tokens)
+
+
+  contains
+  subroutine test_tokenize(input,delimiter,solution)
+
+    character(len=*), intent(in) :: input, delimiter
+    character(len=*), dimension(:), intent(in) :: solution
+
+    character(len=:), dimension(:), allocatable :: tok
+    integer :: i
+
+
+    call tokenize(input,delimiter,tok)
+    do i = 1,size(tok)
+      if (solution(i) /= tok(i)) error stop 'tokenize "'//solution(i)//'" vs. "'//tok(i)//'"'
+    end do
+
+  end subroutine test_tokenize
+
 end subroutine IO_selfTest
 
 end module IO
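One detail the self-test leans on: the solution arrays use fixed-width constants such as 'aa  ' and 'bbb ' because Fortran array constructors require equal-length elements, and this padding matches what the polyfill returns, since all tokens share the length of the longest one. More generally, character relational operators blank-extend the shorter operand, so trailing blanks never cause spurious mismatches. A standalone two-line illustration, not part of the commit:

program padding_demo
  implicit none

  print *, 'aa' == 'aa  '    ! T: the shorter operand is blank-extended before comparing
  print *, 'aa  ' /= 'aab'   ! T: a non-blank difference is still detected
end program padding_demo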